xref: /openbmc/openbmc/poky/meta/lib/oe/package.py (revision 8460358c3d24c71d9d38fd126c745854a6301564)
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import errno
8import fnmatch
9import itertools
10import os
11import shlex
12import re
13import glob
14import stat
15import mmap
16import subprocess
17import shutil
18
19import oe.cachedpath
20
def runstrip(arg):
    # Function to strip a single file, called from split_and_strip_files below
    # A working 'file' (one which works on the target architecture)
    #
    # The elftype is a bit pattern (explained in is_elf below) to tell
    # us what type of file we're processing...
    # 4 - executable
    # 8 - shared library
    # 16 - kernel module

    if len(arg) == 3:
        (file, elftype, strip) = arg
        extra_strip_sections = ''
    else:
        (file, elftype, strip, extra_strip_sections) = arg

    # Make sure the file is readable and writable for the strip run; the
    # original mode is restored afterwards.
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    stripcmd = [strip]
    skip_strip = False
    # kernel module
    if elftype & 16:
        if is_kernel_module_signed(file):
            # Stripping would invalidate the module signature
            bb.debug(1, "Skip strip on signed module %s" % file)
            skip_strip = True
        else:
            stripcmd.extend(["--strip-debug", "--remove-section=.comment",
                "--remove-section=.note", "--preserve-dates"])
    # .so and shared library
    elif ".so" in file and elftype & 8:
        stripcmd.extend(["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"])
    # shared or executable:
    elif elftype & 8 or elftype & 4:
        stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"])
        if extra_strip_sections != '':
            for section in extra_strip_sections.split():
                stripcmd.extend(["--remove-section=" + section])

    stripcmd.append(file)
    bb.debug(1, "runstrip: %s" % stripcmd)

    if not skip_strip:
        # check_output raises CalledProcessError if strip fails
        subprocess.check_output(stripcmd, stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)
71
# Detect .ko module by searching for "vermagic=" string
def is_kernel_module(path):
    """Return True if the file at *path* contains the "vermagic=" marker
    that the kernel build embeds in every module."""
    # Open in binary mode (the content is searched as bytes) and make
    # sure the mapping is released when done.
    with open(path, "rb") as f:
        with mmap.mmap(f.fileno(), 0, prot=mmap.PROT_READ) as m:
            return m.find(b"vermagic=") >= 0
76
# Detect if .ko module is signed
def is_kernel_module_signed(path):
    """Return True if the kernel module at *path* has an appended
    signature (modules end with a 28-byte magic trailer)."""
    with open(path, "rb") as f:
        f.seek(-28, os.SEEK_END)
        tail = f.read()
    # The trailer is ASCII, so a byte-level substring test is sufficient.
    return b"Module signature appended" in tail
83
# Return type (bits):
# 0 - not elf
# 1 - ELF
# 2 - stripped
# 4 - executable
# 8 - shared library
# 16 - kernel module
def is_elf(path):
    """Classify *path* with file(1) and return (path, flags) using the
    bit values documented above."""
    desc = subprocess.check_output(["file", "-b", path], stderr=subprocess.STDOUT).decode("utf-8")

    flags = 0
    if "ELF" not in desc:
        return (path, flags)

    flags |= 1
    if "not stripped" not in desc:
        flags |= 2
    if "executable" in desc:
        flags |= 4
    if "shared" in desc:
        flags |= 8
    # Relocatable objects under /lib/modules that carry module metadata
    # are kernel modules.
    if "relocatable" in desc and path.endswith(".ko") \
            and path.find("/lib/modules/") != -1 and is_kernel_module(path):
        flags |= 16
    return (path, flags)
107
def is_static_lib(path):
    """Return True if *path* is a regular GNU ar static archive."""
    if not path.endswith('.a') or os.path.islink(path):
        return False
    # The magic must include the first slash to avoid matching
    # golang static libraries, which share the plain "!<arch>\n" header.
    magic = b'!<arch>\x0a/'
    with open(path, 'rb') as fh:
        return fh.read(len(magic)) == magic
117
def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False):
    """
    Strip executable code (like executables, shared libraries) _in_place_
    - Based on sysroot_strip in staging.bbclass
    :param dstdir: directory in which to strip files
    :param strip_cmd: Strip command (usually ${STRIP})
    :param libdir: ${libdir} - strip .so files in this directory
    :param base_libdir: ${base_libdir} - strip .so files in this directory
    :param max_process: number of stripping processes started in parallel
    :param qa_already_stripped: Set to True if 'already-stripped' in ${INSANE_SKIP}
    This is for proper logging and messages only.
    """
    import stat, errno, oe.path, oe.utils

    elffiles = {}
    inodes = {}
    libdir = os.path.abspath(dstdir + os.sep + libdir)
    base_libdir = os.path.abspath(dstdir + os.sep + base_libdir)
    exec_mask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    #
    # First lets figure out all of the files we may have to process
    #
    checkelf = []
    inodecache = {}
    for root, dirs, files in os.walk(dstdir):
        for f in files:
            file = os.path.join(root, f)

            try:
                # Resolve symlinks relative to dstdir so absolute link
                # targets are looked up inside the tree being stripped.
                ltarget = oe.path.realpath(file, dstdir, False)
                s = os.lstat(ltarget)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                # Skip broken symlinks
                continue
            if not s:
                continue
            # Check its an executable
            if s[stat.ST_MODE] & exec_mask \
                    or ((file.startswith(libdir) or file.startswith(base_libdir)) and ".so" in f) \
                    or file.endswith('.ko'):
                # If it's a symlink, and points to an ELF file, we capture the readlink target
                if os.path.islink(file):
                    continue

                # It's a file (or hardlink), not a link
                # ...but is it ELF, and is it already stripped?
                checkelf.append(file)
                inodecache[file] = s.st_ino
    # Classify all candidates in parallel (is_elf shells out to file(1))
    results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process)
    for (file, elf_file) in results:
                #elf_file = is_elf(file)
                if elf_file & 1:
                    if elf_file & 2:
                        if qa_already_stripped:
                            bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dstdir):], pn))
                        else:
                            bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dstdir):], pn))
                        continue

                    if inodecache[file] in inodes:
                        # Another hardlink to this inode is already queued for
                        # stripping; relink to it instead of stripping twice.
                        os.unlink(file)
                        os.link(inodes[inodecache[file]], file)
                    else:
                        # break hardlinks so that we do not strip the original.
                        inodes[inodecache[file]] = file
                        bb.utils.break_hardlinks(file)
                        elffiles[file] = elf_file

    #
    # Now strip them (in parallel)
    #
    sfiles = []
    for file in elffiles:
        elf_file = int(elffiles[file])
        sfiles.append((file, elf_file, strip_cmd))

    oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)
198
TRANSLATE = (
    ("@", "@at@"),
    (" ", "@space@"),
    ("\t", "@tab@"),
    ("[", "@openbrace@"),
    ("]", "@closebrace@"),
    ("_", "@underscore@"),
    (":", "@colon@"),
)

def file_translate(file):
    """Escape characters in *file* that are unsafe in package metadata,
    using the TRANSLATE table (applied first-to-last)."""
    translated = file
    for plain, escaped in TRANSLATE:
        translated = translated.replace(plain, escaped)
    return translated

def file_reverse_translate(file):
    """Undo file_translate() by applying the TRANSLATE table in reverse
    order ("@at@" is expanded last, since every escape contains '@')."""
    translated = file
    for plain, escaped in reversed(TRANSLATE):
        translated = translated.replace(escaped, plain)
    return translated
222
def filedeprunner(arg):
    """Compute per-file rpm provides/requires for one package.

    arg is a tuple (pkg, pkgfiles, rpmdeps, pkgdest): the configured
    rpmdeps command is run over pkgfiles and its output parsed into
    {translated filename: [dependency, ...]} maps.
    Returns (pkg, provides, requires).
    """
    import re, subprocess, shlex

    (pkg, pkgfiles, rpmdeps, pkgdest) = arg
    provides = {}
    requires = {}

    # "<size> <filename>" lines introduce the file the following deps belong to
    file_re = re.compile(r'\s+\d+\s(.*)')
    # "<flag> <dep>" lines, where flag is P (provides) or R (requires)
    dep_re = re.compile(r'\s+(\S)\s+(.*)')
    # Matches version constraints such as ">= 1.2.3"
    r = re.compile(r'[<>=]+\s+\S*')

    def process_deps(pipe, pkg, pkgdest, provides, requires):
        file = None
        for line in pipe.split("\n"):

            m = file_re.match(line)
            if m:
                file = m.group(1)
                # Strip the staging prefix and escape special characters
                file = file.replace(pkgdest + "/" + pkg, "")
                file = file_translate(file)
                continue

            m = dep_re.match(line)
            if not m or not file:
                continue

            type, dep = m.groups()

            if type == 'R':
                i = requires
            elif type == 'P':
                i = provides
            else:
               continue

            if dep.startswith("python("):
                continue

            # Ignore all perl(VMS::...) and perl(Mac::...) dependencies. These
            # are typically used conditionally from the Perl code, but are
            # generated as unconditional dependencies.
            if dep.startswith('perl(VMS::') or dep.startswith('perl(Mac::'):
                continue

            # Ignore perl dependencies on .pl files.
            if dep.startswith('perl(') and dep.endswith('.pl)'):
                continue

            # Remove perl versions and perl module versions since they typically
            # do not make sense when used as package versions.
            if dep.startswith('perl') and r.search(dep):
                dep = dep.split()[0]

            # Put parentheses around any version specifications.
            dep = r.sub(r'(\g<0>)',dep)

            if file not in i:
                i[file] = []
            i[file].append(dep)

        return provides, requires

    output = subprocess.check_output(shlex.split(rpmdeps) + pkgfiles, stderr=subprocess.STDOUT).decode("utf-8")
    provides, requires = process_deps(output, pkg, pkgdest, provides, requires)

    return (pkg, provides, requires)
289
290
def read_shlib_providers(d):
    """Build a map of shared-library providers from the .list files in
    SHLIBSDIRS.

    Returns {libname: {arch: (providing package, version)}}.  Directories
    are scanned from least to most specific so that the most specific
    entry read last wins.
    """
    import re

    shlib_provider = {}
    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    list_re = re.compile(r'^(.*)\.list$')
    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        bb.debug(2, "Reading shlib providers in %s" % (dir))
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = list_re.match(file)
            if not m:
                continue
            dep_pkg = m.group(1)
            try:
                # During a build unrelated shlib files may be deleted, so
                # handle files disappearing between the listdirs and open.
                with open(os.path.join(dir, file)) as fd:
                    lines = fd.readlines()
            except IOError:
                continue
            for l in lines:
                # Each line is "libname:arch:version"
                s = l.strip().split(":")
                shlib_provider.setdefault(s[0], {})[s[1]] = (dep_pkg, s[2])
    return shlib_provider
320
# We generate a master list of directories to process, we start by
# seeding this list with reasonable defaults, then load from
# the fs-perms.txt files
def fixup_perms(d):
    """Apply fs-perms.txt style permission/ownership rules to ${PKGD}.

    Seeds default 0755 root:root entries for the standard target path
    variables, merges in rules from FILESYSTEM_PERMS_TABLES, converts
    configured directories into symlinks, then fixes mode/uid/gid of
    every matching path (optionally walking whole trees).
    """
    import pwd, grp

    cpath = oe.cachedpath.CachedPath()
    dvar = d.getVar('PKGD')

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                oe.qa.handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid  = self._procuid(uid)
            self.gid  = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            # "-" means "leave the mode alone"
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                str += " %s" % bb.utils.which(bbpath, conf_file)
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return str

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [    'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    oe.qa.handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    # A path is either a link or a perms entry, never both;
                    # the most recently read definition wins.
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            oe.qa.handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        bb.utils.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
557
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """Expand FILES-style entries relative to the current directory.

    Returns (files, symlink_paths): files holds the matched symlinks,
    directories and regular files ('./'-prefixed); symlink_paths lists
    original entries found to live under a directory symlink.
    """
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry to a './'-prefixed relative path
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f, recursive=True)
        if globbed:
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    # NOTE: 'files' is deliberately extended while being enumerated below,
    # so the contents of newly discovered directories get processed too.
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
599
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """Return the sorted list of configuration files shipped by *pkg*.

    Reads CONFFILES:<pkg> (falling back to CONFFILES), resolves the
    entries under PKGDEST/<pkg>, and keeps only plain files that exist
    (directories and symlinks are dropped).  Paths are returned without
    the leading './'.
    """
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # files_from_filevars globs relative to the current directory, so we
    # must chdir into the package root; always restore the old cwd.
    try:
        conffiles = d.getVar('CONFFILES:%s' % pkg)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES')
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)[0]

        # Remove links and directories from conf_orig_list to get conf_list
        # which only contains normal files
        conf_list = [f for f in conf_orig_list
                     if not os.path.isdir(f)
                     and not os.path.islink(f)
                     and os.path.exists(f)]

        # Remove the leading './'
        conf_list = [f[1:] for f in conf_list]
    finally:
        os.chdir(cwd)
    return sorted(conf_list)
632
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings
    """

    def expand_codepoint(match):
        hexcp = match.group(1)
        if hexcp:
            return ('\\u%s' % hexcp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', expand_codepoint, s)

    # Remaining package name validity fixes: lower-case everything and
    # map the characters package managers reject.
    s = s.lower()
    for bad, good in (('_', '-'), ('@', '+'), (',', '+'), ('/', '-')):
        s = s.replace(bad, good)
    return s
648
def split_locales(d):
    """Split locale files out of the main package.

    Creates one <LOCALEBASEPN>-locale-<name> package per locale directory
    found under LOCALE_PATHS, inserting them into PACKAGES and setting
    their FILES/RRECOMMENDS/RPROVIDES/SUMMARY/DESCRIPTION variables.
    Does nothing when PACKAGE_NO_LOCALE is set or no locales are found.
    """
    cpath = oe.cachedpath.CachedPath()
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # Insert the per-locale packages where <pn>-locale sat in PACKAGES,
    # or append them at the end if it was not listed.
    try:
        locale_index = packages.index(pn + '-locale')
        packages.pop(locale_index)
    except ValueError:
        locale_index = len(packages)

    lic = d.getVar("LICENSE:" + pn + "-locale")

    localepaths = []
    locales = set()
    for localepath in (d.getVar('LOCALE_PATHS') or "").split():
        localedir = dvar + localepath
        if not cpath.isdir(localedir):
            bb.debug(1, 'No locale files in %s' % localepath)
            continue

        localepaths.append(localepath)
        # Each immediate subdirectory name is treated as a locale name
        with os.scandir(localedir) as it:
            for entry in it:
                if entry.is_dir():
                    locales.add(entry.name)

    if len(locales) == 0:
        bb.debug(1, "No locale files in this package")
        return

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.insert(locale_index, pkg)
        locale_index += 1
        files = []
        for localepath in localepaths:
            files.append(os.path.join(localepath, l))
        d.setVar('FILES:' + pkg, " ".join(files))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
        if lic:
            d.setVar('LICENSE:' + pkg, lic)
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
719
def package_debug_vars(d):
    """Return the split-debug naming/layout settings selected by
    PACKAGE_DEBUG_SPLIT_STYLE (defaulting to the '.debug' style)."""
    style = d.getVar('PACKAGE_DEBUG_SPLIT_STYLE')

    if style == 'debug-file-directory':
        # Single debug-file-directory style debug info
        return {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }

    if style == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        return {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "",
        }

    # 'debug-with-srcpkg' and the default ".debug" style share the same layout
    return {
        "append": "",
        "staticappend": "",
        "dir": "/.debug",
        "staticdir": "/.debug-static",
        "libdir": "",
        "staticlibdir": "",
        "srcdir": "/usr/src/debug",
    }
767
768
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """Extract the source file paths from dwarfsrcfiles output.

    dwarfsrcfiles indents each source path with a tab; a dict is used to
    deduplicate the normalized paths while preserving first-seen order.
    """
    sources = {
        os.path.normpath(line.split()[0]): ""
        for line in dwarfsrcfiles_output.splitlines()
        if line.startswith("\t")
    }
    return sources.keys()
777
def source_info(file, d, fatal=True):
    """Run dwarfsrcfiles on *file* and return its debug source list.

    Exit status 255 means a specific file was not fully parsed, which is
    tolerated; any other non-zero status is fatal unless *fatal* is
    False, in which case it is only logged.
    """
    cmd = ["dwarfsrcfiles", file]
    retval = 0
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as exc:
        output = exc.output
        retval = exc.returncode

    if retval not in (0, 255):
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    return list(parse_debugsources_from_dwarfsrcfiles_output(output))
797
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    # Modifying a signed kernel module would invalidate its signature.
    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    # Temporarily make the file readable and writable if it is not
    # already; the original mode is restored once the split is done.
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Split the file: copy the debug sections out, then point the
    # stripped binary back at them via a .gnu_debuglink section that has
    # the view of the file path on the target.
    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
844
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike splitdebuginfo above, there is no way to split a static library
    # into two components.  So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # file: absolute path of the static library, located under the PKGD tree
    # dvar: the PKGD root prefix which 'file' starts with
    # dv:   debug-split settings dict ("staticlibdir", "staticdir",
    #       "staticappend" and "srcdir" keys are used here)
    # d:    the bitbake datastore
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Make sure the file is readable for the copy; the original mode is
    # restored at the end.
    # NOTE(review): 'os.access(file, os.R_OK)' is true for any readable file,
    # so this branch is taken almost always; the intent was probably
    # 'not os.access(file, os.R_OK)' -- confirm before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
884
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
    #
    # file: absolute path of the binary, located under the PKGD tree
    # dvar: the PKGD root prefix which 'file' starts with
    # dv:   debug-split settings dict ("libdir", "dir" and "append" keys are
    #       used here)
    # d:    the bitbake datastore (supplies READELF/NM/OBJCOPY/WORKDIR)

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    # debugfile is the split-off debuginfo file produced earlier (see
    # splitdebuginfo); minidebugfile is the intermediate file built here.
    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # minidebuginfo does not make sense to apply to ELF objects other than
    # executables and shared libraries, skip applying the minidebuginfo
    # generation for objects like kernel modules.
    for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
        if not line.strip().startswith("Type:"):
            continue
        elftype = line.split(":")[1].strip()
        if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
            bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
            return
        break

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        # strip the leading "  [ 1]" section index to allow splitting on space
        if ']' not in line:
            continue
        fields = line[line.index(']') + 1:].split()
        if len(fields) < 7:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[6]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        # no 'A' in the flags column means the section is not allocated
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Remove stale output from any previous run before regenerating.
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    # Strip everything (-S) except the kept symbols and wanted sections.
    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    # xz-compress ('--keep' preserves the uncompressed file) ...
    subprocess.check_call(['xz', '--keep', minidebugfile])

    # ... and embed the result into the binary as the .gnu_debugdata section.
    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
974
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # debugsrcdir: target path the sources are installed under (relative to PKGD)
    # sources:     iterable of source file paths collected by source_info()
    # d:           the bitbake datastore

    cpath = oe.cachedpath.CachedPath()

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
           sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        # Collect the old=new pairs from -fdebug-prefix-map entries in CFLAGS
        # (skipping sysroot remappings) so remapped debug paths can be
        # resolved back to real source locations.
        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-fdebug-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        # Remember which components of debugsrcdir did not exist beforehand
        # so that any that remain empty can be removed again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            # NOTE(review): 'LC_ALL=C ;' sets a plain (unexported) shell
            # variable, so it may not affect the piped commands -- confirm.
            cmd =  "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            # Finally copy the filtered list of sources into the debug tree
            # with cpio in copy-pass mode.
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass
            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                    (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if thats the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories!  Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
1058
1059
def process_split_and_strip_files(d):
    """Scan PKGD for ELF binaries and static libraries, split out their
    debug information and strip them.

    Walks the PKGD tree once to classify files (ELF executables/libraries,
    symlinks to them, static archives), de-duplicates hardlinks, splits
    debuginfo via splitdebuginfo()/splitstaticdebuginfo(), copies referenced
    sources, then strips the binaries and optionally injects minidebuginfo.
    Controlled by INHIBIT_PACKAGE_STRIP, INHIBIT_PACKAGE_DEBUG_SPLIT,
    PACKAGE_DEBUG_STATIC_SPLIT, PACKAGE_STRIP_STATIC and the
    'minidebuginfo' DISTRO_FEATURE.
    """
    cpath = oe.cachedpath.CachedPath()

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    dv = package_debug_vars(d)

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        checkstatic = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if dv["append"] and file.endswith(dv["append"]):
                    continue
                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue

                if oe.package.is_static_lib(file):
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkstatic[file] = (file, file_reference)
                    continue

                # Check its an executable
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
                        and (".so" in f or ".node" in f)) \
                        or (f.startswith('vmlinux') or ".ko" in f):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        # Resolve symlink targets in parallel to find which point at ELF files
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        # elf_file is a bit pattern: 1 = is ELF, 2 = already stripped, plus the
        # type bits described above runstrip (4 exec / 8 shared / 16 module).
        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        oe.qa.handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                    # break hardlink
                    bb.utils.break_hardlinks(file)
                    elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

        # Do the same hardlink processing as above, but for static libraries
        results = list(checkstatic.keys())

        # As above, sort the results.
        results.sort(key=lambda x: x[0])

        for file in results:
            # Use a reference of device ID and inode number to identify files
            file_reference = checkstatic[file][1]
            if file_reference in inodes:
                os.unlink(file)
                os.link(inodes[file_reference][0], file)
                inodes[file_reference].append(file)
            else:
                inodes[file_reference] = [file]
                # break hardlink
                bb.utils.break_hardlinks(file)
                staticlibs.append(file)
            # Modified the file so clear the cache
            cpath.updatecache(file)

    # Turn an absolute PKGD path back into a target-relative one.
    def strip_pkgd_prefix(f):
        nonlocal dvar

        if f.startswith(dvar):
            return f[len(dvar):]

        return f

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))

        if dv["srcdir"] and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
            else:
                for file in staticlibs:
                    results.append( (file,source_info(file, d)) )

        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})

        # Union of all source files referenced by any split binary
        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                fpath = dvar + dest
                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                if os.access(ftarget, os.R_OK):
                    bb.utils.mkdirhier(os.path.dirname(fpath))
                    # Only one hardlink of separated debug info file in each directory
                    if not os.access(fpath, os.R_OK):
                        #bb.note("Link %s -> %s" % (fpath, ftarget))
                        os.link(ftarget, fpath)
                elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                    deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"]
                    fpath = dvar + deststatic
                    ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"]
                    if os.access(ftarget, os.R_OK):
                        bb.utils.mkdirhier(os.path.dirname(fpath))
                        # Only one hardlink of separated debug info file in each directory
                        if not os.access(fpath, os.R_OK):
                            #bb.note("Link %s -> %s" % (fpath, ftarget))
                            os.link(ftarget, fpath)
                else:
                    bb.note("Unable to find inode link target %s" % (target))

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            # Build a debug-file symlink target mirroring the original link
            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + dv["dir"] + "/"
            ftarget += lbase + dv["append"]
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the dv["srcdir"] if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(dv["srcdir"], sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    # Build "minidebuginfo" and reinject it back into the stripped binaries
    if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                     extraargs=(dvar, dv, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)
1319
1320
def populate_packages(d):
    """Populate the per-package PKGDEST/<pkg> trees from the PKGD staging tree.

    Orders PACKAGES (-src first, then -dbg, then the rest), sanity-checks
    it for duplicates, hardlinks/copies each package's FILES into its own
    root, drops packages with incompatible licenses and raises a QA error
    for files that were installed but never shipped.
    """
    cpath = oe.cachedpath.CachedPath()

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dvar = d.getVar('PKGD')
    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)

    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')

    # If debug-with-srcpkg mode is enabled then add the source package if it
    # doesn't exist and add the source file contents to the source package.
    if split_source_package:
        src_package_name = ('%s-src' % d.getVar('PN'))
        if not src_package_name in packages:
            packages.append(src_package_name)
        d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')

    # Sanity check PACKAGES for duplicates
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_dict = {}

    for i, pkg in enumerate(packages):
        if pkg in package_dict:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            oe.qa.handle_error("packages-list", msg, d)
        # Ensure the source package gets the chance to pick up the source files
        # before the debug package by ordering it first in PACKAGES. Whether it
        # actually picks up any source files is controlled by
        # PACKAGE_DEBUG_SPLIT_STYLE.
        elif pkg.endswith("-src"):
            package_dict[pkg] = (10, i)
        elif autodebug and pkg.endswith("-dbg"):
            package_dict[pkg] = (30, i)
        else:
            package_dict[pkg] = (50, i)
    packages = sorted(package_dict.keys(), key=package_dict.get)
    d.setVar('PACKAGES', ' '.join(packages))
    pkgdest = d.getVar('PKGDEST')

    # Paths already shipped in some package. Only membership tests are done
    # on it, so use a set rather than a list (avoids O(n^2) scans).
    seen = set()

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    # Collect all .debug / .debug-static paths once; they are appended to
    # every autogenerated -dbg package below.
    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in packages:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES:%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            oe.qa.handle_error("files-invalid", msg, d)
            # str.replace returns a new string, so the result must be
            # assigned (previously the fix-up was silently discarded).
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = oe.package.files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.add(file)

            # Create one directory level in the package root, mirroring the
            # mode/ownership of the corresponding PKGD directory.
            def mkdir(src, dest, p):
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.add(p)
                cpath.updatecache(dest)

            # Recreate the whole parent directory chain for 'paths'.
            def mkdir_recurse(src, dest, paths):
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            # Regular files are hardlinked into the package tree; symlinks
            # are copied so they stay symlinks.
            if not cpath.islink(file):
                os.link(file, fpath)
                continue
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root,file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                         "parent directory structure does not exist. One of "
                         "its parent directories is a symlink whose target "
                         "directory is not included in the package." %
                         (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle excluding packages with incompatible licenses
    package_list = []
    skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages)
    for pkg in packages:
        if pkg in skipped_pkgs:
            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg])
            oe.qa.handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    # Anything in PKGD that no package claimed is an installed-vs-shipped
    # QA issue.
    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n  " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            oe.qa.handle_error("installed-vs-shipped", msg, d)
1483
def process_fixsymlinks(pkgfiles, d):
    """Detect dangling symlinks in each package and add RDEPENDS on the
    package that ships the link target.

    pkgfiles: dict mapping package name -> list of absolute file paths
              under PKGDEST/<pkg>
    d:        the bitbake datastore

    Links whose target exists in no package are only reported via bb.note.
    """
    cpath = oe.cachedpath.CachedPath()
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    # Per package: the files it ships (package-relative) and the symlink
    # targets that do not resolve inside the package itself.
    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    # Resolve each dangling target against the other packages' file lists
    # and record the runtime dependencies that satisfy them.
    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    # No self-dependency needed
                    if p == pkg:
                        break
                    newrdepends.setdefault(pkg, []).append(p)
                    break
            if not found:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS,
    # keeping any existing version constraints.
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1526
def process_filedeps(pkgfiles, d):
    """
    Collect perfile run-time dependency metadata
    Output:
     FILERPROVIDESFLIST:pkg - list of all files w/ deps
     FILERPROVIDES:filepath:pkg - per file dep

      FILERDEPENDSFLIST:pkg - list of all files w/ deps
      FILERDEPENDS:filepath:pkg - per file dep
    """
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    # Packages of these kinds never carry useful per-file dependencies.
    def _skip(name):
        if d.getVar('SKIP_FILEDEPS:' + name) == '1':
            return True
        if name.endswith(('-dbg', '-doc', '-src')) or name.startswith('kernel-module-'):
            return True
        return any(m in name for m in ('-locale-', '-localedata-', '-gconv-', '-charmap-'))

    # Fan the work out in batches of 100 files per task.
    tasks = []
    for pkg in packages.split():
        if _skip(pkg):
            continue
        flist = pkgfiles[pkg]
        for start in range(0, len(flist), 100):
            tasks.append((pkg, flist[start:start + 100], rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, tasks, d)

    provides_files = {}
    requires_files = {}

    # Record the per-file provides/requires and remember which files had any.
    for (pkg, provides, requires) in processed:
        plist = provides_files.setdefault(pkg, [])
        rlist = requires_files.setdefault(pkg, [])

        for file in sorted(provides):
            plist.append(file)
            d.appendVar("FILERPROVIDES:" + file + ":" + pkg, " " + " ".join(provides[file]))

        for file in sorted(requires):
            rlist.append(file)
            d.appendVar("FILERDEPENDS:" + file + ":" + pkg, " " + " ".join(requires[file]))

    # Publish the sorted per-package file lists.
    for pkg, flist in requires_files.items():
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(flist)))
    for pkg, flist in provides_files.items():
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(flist)))
1583
def process_shlibs(pkgfiles, d):
    """
    Generate shared-library provider/consumer metadata for each package.

    For every package in PACKAGES (minus EXCLUDE_PACKAGES_FROM_SHLIBS),
    scan its files for shared libraries, record the sonames the package
    provides into SHLIBSWORKDIR/<pkg>.list, then resolve the libraries it
    needs against all known providers and write the resulting runtime
    dependencies into PKGDEST/<pkg>.shlibdeps.  Also appends an ldconfig
    call to pkg_postinst when a library is installed into a ${baselib}
    directory.
    """
    cpath = oe.cachedpath.CachedPath()

    # EXCLUDE_FROM_SHLIBS disables shlib handling for the whole recipe.
    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    # Anything containing ".so" in its name is a candidate shared library,
    # even if not marked executable.
    lib_re = re.compile(r"^.*\.so")
    # Directories whose final component is ${baselib} trigger an ldconfig run.
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    # Honour the per-package opt-out list.
    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    # NOTE(review): workdir appears unused in this function.
    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        # Parse `objdump -p` output for one ELF file and return
        # (needs_ldconfig, needed, sonames), where needed is a set of
        # (soname, file, rpath-tuple) entries and sonames a set of
        # (soname, dir, pkgver) entries.  Reads private_libs, pkgdest and
        # libdir_re from the enclosing scope; runs in a worker process via
        # multiprocess_launch below.
        needs_ldconfig = False
        needed = set()
        sonames = set()
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        rpath = tuple()
        # First pass: extract RPATH (with $ORIGIN expanded to the library's
        # install dir) so NEEDED entries can be resolved against it later.
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        # Second pass: collect NEEDED and SONAME entries.
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                # NOTE(review): needed holds (dep, file, rpath) tuples, so
                # this bare-string membership test is always True; the set
                # still deduplicates identical tuples, so it is harmless.
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
        return (needs_ldconfig, needed, sonames)

    def darwin_so(file, needed, sonames, pkgver):
        # Mach-O equivalent of linux_so, using otool.  Mutates the passed-in
        # needed/sonames collections in place; reads pkg and pkgdest from the
        # enclosing loop scope.
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                # NOTE(review): sonames holds (name, dir, pkgver) tuples, so
                # this bare-string test is always True; set dedup makes it
                # harmless.
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            # NOTE(review): rpath is collected here but never used afterwards
            # — confirm whether it was meant to feed the needed entries below.
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                for l in out.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        # `otool -L` lists the libraries this binary links against.
        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                     needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, pkgver):
        # PE/COFF equivalent: every .dll is treated as provided; .dll/.exe
        # imports are discovered via `objdump -p`.  Mutates needed/sonames
        # in place; reads pkg and pkgdest from the enclosing loop scope.
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    # pkg -> set of (soname, file, rpath) entries the package requires.
    needed = {}

    # Providers already registered by earlier-built recipes.
    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: work out what each package provides.
    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            # NOTE(review): legacy underscore override syntax; the rest of
            # this file uses ':' overrides (e.g. 'PKGV:' + pkg above) —
            # confirm whether this should be 'PV:' + pkg.
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        linuxlist = []
        for file in pkgfiles[pkg]:
                soname = None
                if cpath.islink(file):
                    continue
                if hostos.startswith("darwin"):
                    darwin_so(file, needed, sonames, pkgver)
                elif hostos.startswith("mingw"):
                    mingw_dll(file, needed, sonames, pkgver)
                elif os.access(file, os.X_OK) or lib_re.match(file):
                    linuxlist.append(file)

        # ELF files are processed in parallel; merge the per-file results.
        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                needs_ldconfig = needs_ldconfig or ldconfig

        # Record this package's provides as "soname:dir:version" lines and
        # register them in the in-memory provider map for pass 2.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    # ASSUME_SHLIBS entries ("lib:pkg" or "lib:pkg_ver") declare providers
    # that were not discovered by scanning, registered under ${libdir}.
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve each package's needed sonames against the provider map
    # and write the results to <pkg>.shlibdeps.
    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                matches = set()
                # Search order: the binary's RPATH entries first, then all
                # known provider dirs, then the default library search path.
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    # Never depend on ourselves.
                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        # Always remove any stale .shlibdeps file before writing a fresh one.
        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
1851
def process_pkgconfig(pkgfiles, d):
    """
    Scan each package for pkg-config (.pc) files, record the modules each
    package provides (as a .pclist file in SHLIBSWORKDIR) and the modules
    it requires, then resolve those requirements across all known
    providers and write per-package .pcdeps files into PKGDEST.
    """
    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for fn in sorted(pkgfiles[pkg]):
            pc_match = pc_re.match(fn)
            if not pc_match:
                continue
            # Fresh datastore used purely to expand the .pc file's own
            # variable assignments.
            expander = bb.data.init()
            pkgconfig_provided[pkg].append(os.path.basename(pc_match.group(1)))
            if not os.access(fn, os.R_OK):
                continue
            with open(fn, 'r') as f:
                pc_lines = f.readlines()
            for line in pc_lines:
                field = field_re.match(line)
                if field:
                    hdr = field.group(1)
                    exp = expander.expand(field.group(2))
                    if hdr == 'Requires' or hdr == 'Requires.private':
                        pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
                        continue
                assignment = var_re.match(line)
                if assignment:
                    expander.setVar(assignment.group(1), expander.expand(assignment.group(2)))

    # Write out one .pclist per package that provides any modules.
    for pkg in packages.split():
        provided = pkgconfig_provided[pkg]
        if provided:
            pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
            with open(pkgs_file, 'w') as f:
                f.writelines('%s\n' % p for p in sorted(provided))

    # Go from least to most specific since the last one found wins
    for directory in reversed(shlibs_dirs):
        if not os.path.exists(directory):
            continue
        for entry in sorted(os.listdir(directory)):
            pclist_match = re.match(r'^(.*)\.pclist$', entry)
            if not pclist_match:
                continue
            provider = pclist_match.group(1)
            with open(os.path.join(directory, entry)) as fd:
                pkgconfig_provided[provider] = [line.rstrip() for line in fd.readlines()]

    # Resolve each package's required modules against all known providers.
    for pkg in packages.split():
        deps = []
        for module in pkgconfig_needed[pkg]:
            found = False
            for provider in pkgconfig_provided.keys():
                if module in pkgconfig_provided[provider]:
                    found = True
                    if provider != pkg and provider not in deps:
                        deps.append(provider)
            if not found:
                bb.note("couldn't find pkgconfig module '%s' in any package" % module)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if deps:
            with open(deps_file, 'w') as fd:
                fd.writelines(dep + '\n' for dep in deps)
1930
def read_libdep_files(d):
    """
    Read the per-package library dependency files (.shlibdeps, .pcdeps and
    .clilibdeps) written into PKGDEST by earlier processing steps.

    Returns a dict mapping each package in PACKAGES to a dict of
    {dependency-name: version-constraint-list}, keeping the first
    constraint seen for each dependency.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Bug fix: the original called l.rstrip() without using
                    # the result (strings are immutable), so the trailing
                    # newline was never stripped as intended.
                    deps = bb.utils.explode_dep_versions2(l.rstrip())
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1948
def process_depchains(pkgfiles, d):
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example:  If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages  = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes  = (d.getVar('DEPCHAIN_PRE') or '').split()

    # Add <depend><suffix> RRECOMMENDS entries derived from build-time
    # DEPENDS (used for the -dev chain).
    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(depends):
            # Native/cross/virtual build dependencies have no runtime package.
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Normalize to the base package name before applying the suffix.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    # Add <depend><suffix> RRECOMMENDS entries derived from runtime
    # RDEPENDS (used for all other chains).
    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(rdepends):
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            # Normalize to the base package name before applying the suffix.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    # Append dep to list only if not already present (preserves order).
    def add_dep(list, dep):
        if dep not in list:
            list.append(dep)

    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    # Union of all packages' runtime dependencies, order-preserving.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier to {modified-pkg: (base-pkg, name-builder)}.
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): this slices len(prefix) chars off the *end*
                # of the name; deriving the base from a prefix would normally
                # be pkg[len(prefix):] — confirm whether this longstanding
                # behavior is intended.
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    # -dbg packages recommend the -dbg variants of the recipe's library
    # dependencies unless DEPCHAIN_DBGDEFAULTDEPS opts into default chaining.
    # (pkglibdeplist/dbgdefaultdeps are only defined — and only used — when
    # a -dbg package exists.)
    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package opt-out via the nodeprrecs varflag.
            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            # Single modified package: chain against all packages' RDEPENDS;
            # multiple: chain against the parent package's RDEPENDS only.
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
2069