1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from abc import ABCMeta, abstractmethod
8import os
9import glob
10import subprocess
11import shutil
12import re
13import collections
14import bb
15import tempfile
16import oe.utils
17import oe.path
18import string
19from oe.gpg_sign import get_signer
20import hashlib
21import fnmatch
22
23# this can be used by all PM backends to create the index files in parallel
24def create_index(arg):
25    index_cmd = arg
26
27    bb.note("Executing '%s' ..." % index_cmd)
28    result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
29    if result:
30        bb.note(result)
31
def opkg_query(cmd_output):
    """
    Parse the output from the package manager (deb/ipk style stanzas) and
    return a dictionary keyed by package name. Each value is a dict with
    the keys: "arch", "ver", "filename", "deps", "pkgarch" and "provs".
    """
    # strips version constraints such as " (>= 1.0)" from dependency fields
    constraint_re = re.compile(r' \([=<>]* [^ )]*\)')
    packages = dict()
    name = ""
    arch = ""
    version = ""
    fname = ""
    depends = []
    provides = []
    pkg_arch = ""
    # the appended '' guarantees the final stanza is flushed even without
    # a trailing blank line
    for raw in cmd_output.splitlines() + ['']:
        raw = raw.rstrip()
        if ':' in raw:
            if raw.startswith("Package: "):
                name = raw.split(": ")[1]
            elif raw.startswith("Architecture: "):
                arch = raw.split(": ")[1]
            elif raw.startswith("Version: "):
                version = raw.split(": ")[1]
            elif raw.startswith("File: ") or raw.startswith("Filename:"):
                fname = raw.split(": ")[1]
                if "/" in fname:
                    fname = os.path.basename(fname)
            elif raw.startswith("Depends: "):
                depends.extend(constraint_re.sub('', raw.split(": ")[1]).split(", "))
            elif raw.startswith("Recommends: "):
                for recommend in constraint_re.sub('', raw.split(": ")[1]).split(", "):
                    depends.append("%s [REC]" % recommend)
            elif raw.startswith("PackageArch: "):
                pkg_arch = raw.split(": ")[1]
            elif raw.startswith("Provides: "):
                provides.extend(constraint_re.sub('', raw.split(": ")[1]).split(", "))

        # A blank line terminates the current stanza: record it and reset
        elif not raw:
            # IPK doesn't include the filename
            if not fname:
                fname = "%s_%s_%s.ipk" % (name, version, arch)
            if name:
                packages[name] = {"arch": arch, "ver": version,
                        "filename": fname, "deps": depends, "pkgarch": pkg_arch, "provs": provides}
            name = ""
            arch = ""
            version = ""
            fname = ""
            depends = []
            provides = []
            pkg_arch = ""

    return packages
92
def failed_postinsts_abort(pkgs, log_path):
    """Abort the build because the postinstall scriptlets of 'pkgs' failed.

    'log_path' points the user at the log containing the failure details.
    """
    msg = """Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot,
then please place them into pkg_postinst_ontarget:${PN} ().
Deferring to first boot via 'exit 1' is no longer supported.
Details of the failure are in %s.""" % (pkgs, log_path)
    bb.fatal(msg)
98
def generate_locale_archive(d, rootfs, target_arch, localedir):
    """Pack the compiled locale directories under 'localedir' into a
    glibc locale-archive using cross-localedef.

    Aborts the build if 'target_arch' is not a known architecture.
    """
    # Pretty sure we don't need the alignment/endianness options for locale
    # archive generation but keeping them to be safe...
    le = ["--uint32-align=4", "--little-endian"]
    be = ["--uint32-align=4", "--big-endian"]
    locale_arch_options = {
        "arc": le,
        "arceb": be,
        "arm": le,
        "armeb": be,
        "aarch64": le,
        "aarch64_be": be,
        "sh4": be,
        "powerpc": be,
        "powerpc64": be,
        "powerpc64le": le,
        "mips": be,
        "mipsisa32r6": be,
        "mips64": be,
        "mipsisa64r6": be,
        "mipsel": le,
        "mipsisa32r6el": le,
        "mips64el": le,
        "mipsisa64r6el": le,
        "riscv64": le,
        "riscv32": le,
        "i586": le,
        "i686": le,
        "x86_64": le,
        "loongarch64": le
    }
    try:
        arch_options = locale_arch_options[target_arch]
    except KeyError:
        bb.error("locale_arch_options not found for target_arch=" + target_arch)
        bb.fatal("unknown arch:" + target_arch + " for locale_arch_options")

    # Need to set this so cross-localedef knows where the archive is
    env = dict(os.environ)
    env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive")

    for entry in sorted(os.listdir(localedir)):
        path = os.path.join(localedir, entry)
        if not os.path.isdir(path):
            continue
        cmd = ["cross-localedef", "--verbose"] + arch_options + ["--add-to-archive", path]
        subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT)
145
class Indexer(object, metaclass=ABCMeta):
    """Abstract base class for backend-specific package index writers."""

    def __init__(self, d, deploy_dir):
        # d: the bitbake datastore; deploy_dir: directory holding the
        # packages the index should be generated for
        self.d = d
        self.deploy_dir = deploy_dir

    @abstractmethod
    def write_index(self):
        """Create/update the index files under self.deploy_dir."""
        pass
154
class PkgsList(object, metaclass=ABCMeta):
    """Abstract base class for listing the packages installed in a rootfs."""

    def __init__(self, d, rootfs_dir):
        # d: the bitbake datastore; rootfs_dir: root of the filesystem to query
        self.d = d
        self.rootfs_dir = rootfs_dir

    @abstractmethod
    def list_pkgs(self):
        """Return information about the packages installed in rootfs_dir."""
        pass
163
class PackageManager(object, metaclass=ABCMeta):
    """
    This is an abstract class. Do not instantiate this directly.

    Common driver for all package-manager backends (rpm/ipk/deb): handles
    postinstall intercept scripts, complementary package installation,
    deploy-directory locking and feed URI construction. Backends implement
    the abstract methods below.
    """

    def __init__(self, d, target_rootfs):
        # d: bitbake datastore; target_rootfs: root of the filesystem
        # being populated by this package manager instance
        self.d = d
        self.target_rootfs = target_rootfs
        self.deploy_dir = None
        self.deploy_lock = None
        self._initialize_intercepts()

    def _initialize_intercepts(self):
        """Copy the postinstall intercept scripts into a per-rootfs staging dir."""
        bb.note("Initializing intercept dir for %s" % self.target_rootfs)
        # As there might be more than one instance of PackageManager operating at the same time
        # we need to isolate the intercept_scripts directories from each other,
        # hence the ugly hash digest in dir name.
        self.intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts-%s" %
                                           (hashlib.sha256(self.target_rootfs.encode()).hexdigest()))

        # POSTINST_INTERCEPTS (explicit list) wins over POSTINST_INTERCEPTS_PATH,
        # which wins over POSTINST_INTERCEPTS_DIR / the default scripts dir
        postinst_intercepts = (self.d.getVar("POSTINST_INTERCEPTS") or "").split()
        if not postinst_intercepts:
            postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_PATH")
            if not postinst_intercepts_path:
                postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_DIR") or self.d.expand("${COREBASE}/scripts/postinst-intercepts")
            postinst_intercepts = oe.path.which_wild('*', postinst_intercepts_path)

        bb.debug(1, 'Collected intercepts:\n%s' % ''.join('  %s\n' % i for i in postinst_intercepts))
        # start from a clean intercepts dir every time
        bb.utils.remove(self.intercepts_dir, True)
        bb.utils.mkdirhier(self.intercepts_dir)
        for intercept in postinst_intercepts:
            shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept)))

    @abstractmethod
    def _handle_intercept_failure(self, failed_script):
        """Backend hook: mark the packages named in 'failed_script' (a
        space-separated string) so their postinstalls run on first boot."""
        pass

    def _postpone_to_first_boot(self, postinst_intercept_hook):
        """Defer the packages registered in an intercept hook to first boot.

        The hook file lists its packages on a '##PKGS:' comment line; that
        list is passed to the backend-specific _handle_intercept_failure().
        """
        with open(postinst_intercept_hook) as intercept:
            registered_pkgs = None
            for line in intercept.read().split("\n"):
                m = re.match(r"^##PKGS:(.*)", line)
                if m is not None:
                    registered_pkgs = m.group(1).strip()
                    break

            if registered_pkgs is not None:
                bb.note("If an image is being built, the postinstalls for the following packages "
                        "will be postponed for first boot: %s" %
                        registered_pkgs)

                # call the backend dependent handler
                self._handle_intercept_failure(registered_pkgs)


    def run_intercepts(self, populate_sdk=None):
        """Execute every staged intercept script against the target rootfs.

        populate_sdk: None for image builds, or 'host'/'target' when building
        an SDK; it controls whether a failing intercept is fatal, merely
        logged, or deferred to first boot.
        """
        intercepts_dir = self.intercepts_dir

        bb.note("Running intercept scripts:")
        # intercept scripts read these from the environment
        os.environ['D'] = self.target_rootfs
        os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE')
        for script in os.listdir(intercepts_dir):
            script_full = os.path.join(intercepts_dir, script)

            # skip the helper template and anything not executable
            if script == "postinst_intercept" or not os.access(script_full, os.X_OK):
                continue

            # we do not want to run any multilib variant of this
            if script.startswith("delay_to_first_boot"):
                self._postpone_to_first_boot(script_full)
                continue

            if populate_sdk == 'host' and self.d.getVar('SDK_OS') == 'mingw32':
                bb.note("The postinstall intercept hook '%s' could not be executed due to missing wine support, details in %s/log.do_%s"
                                % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                continue

            bb.note("> Executing %s intercept ..." % script)

            try:
                output = subprocess.check_output(script_full, stderr=subprocess.STDOUT)
                if output: bb.note(output.decode("utf-8"))
            except subprocess.CalledProcessError as e:
                bb.note("Exit code %d. Output:\n%s" % (e.returncode, e.output.decode("utf-8")))
                if populate_sdk == 'host':
                    # host SDK: any intercept failure is fatal
                    bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                elif populate_sdk == 'target':
                    # target SDK: tolerate only missing qemu usermode support
                    if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
                        bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
                                % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                    else:
                        bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                else:
                    # image build: missing qemu usermode defers to first boot,
                    # anything else is fatal
                    if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
                        bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
                                % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                        self._postpone_to_first_boot(script_full)
                    else:
                        bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))

    @abstractmethod
    def update(self):
        """
        Update the package manager package database.
        """
        pass

    @abstractmethod
    def install(self, pkgs, attempt_only=False, hard_depends_only=False):
        """
        Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
        True, installation failures are ignored.
        """
        pass

    @abstractmethod
    def remove(self, pkgs, with_dependencies=True):
        """
        Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
        is False, then any dependencies are left in place.
        """
        pass

    @abstractmethod
    def write_index(self):
        """
        This function creates the index files
        """
        pass

    @abstractmethod
    def remove_packaging_data(self):
        """Remove the package manager's own metadata from the target rootfs."""
        pass

    @abstractmethod
    def list_installed(self):
        """Return a dict describing the packages installed in the target rootfs."""
        pass

    @abstractmethod
    def extract(self, pkg):
        """
        Returns the path to a tmpdir where resides the contents of a package.
        Deleting the tmpdir is responsability of the caller.
        """
        pass

    @abstractmethod
    def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
        """
        Add remote package feeds into repository manager configuration. The parameters
        for the feeds are set by feed_uris, feed_base_paths and feed_archs.
        See http://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PACKAGE_FEED_URIS
        for their description.
        """
        pass

    def install_glob(self, globs, sdk=False):
        """
        Install all packages that match a glob.
        """
        # TODO don't have sdk here but have a property on the superclass
        # (and respect in install_complementary)
        if sdk:
            pkgdatadir = self.d.getVar("PKGDATA_DIR_SDK")
        else:
            pkgdatadir = self.d.getVar("PKGDATA_DIR")

        try:
            bb.note("Installing globbed packages...")
            cmd = ["oe-pkgdata-util", "-p", pkgdatadir, "list-pkgs", globs]
            bb.note('Running %s' % cmd)
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            if stderr: bb.note(stderr.decode("utf-8"))
            pkgs = stdout.decode("utf-8")
            self.install(pkgs.split(), attempt_only=True)
        # NOTE(review): Popen.communicate() never raises CalledProcessError,
        # so this handler looks unreachable and proc.returncode is never
        # checked — presumably check_output was intended; verify before relying
        # on the error path.
        except subprocess.CalledProcessError as e:
            # Return code 1 means no packages matched
            if e.returncode != 1:
                bb.fatal("Could not compute globbed packages list. Command "
                         "'%s' returned %d:\n%s" %
                         (' '.join(cmd), e.returncode, e.output.decode("utf-8")))

    def install_complementary(self, globs=None):
        """
        Install complementary packages based upon the list of currently installed
        packages e.g. locales, *-dev, *-dbg, etc. Note: every backend needs to
        call this function explicitly after the normal package installation.
        """
        if globs is None:
            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
            split_linguas = set()

            # include both the full locale ("en-gb") and its base language ("en")
            for translation in self.d.getVar('IMAGE_LINGUAS').split():
                split_linguas.add(translation)
                split_linguas.add(translation.split('-')[0])

            split_linguas = sorted(split_linguas)

            for lang in split_linguas:
                globs += " *-locale-%s" % lang
                # each IMAGE_LINGUAS_COMPLEMENTARY entry is a pattern with a
                # '%s' placeholder for the language — TODO confirm
                for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
                    globs += (" " + complementary_linguas) % lang

        if globs is None:
            return

        # we need to write the list of installed packages to a file because the
        # oe-pkgdata-util reads it from a file
        with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
            pkgs = self.list_installed()

            provided_pkgs = set()
            for pkg in pkgs.values():
                provided_pkgs |= set(pkg.get('provs', []))

            output = oe.utils.format_pkg_list(pkgs, "arch")
            installed_pkgs.write(output)
            installed_pkgs.flush()

            cmd = ["oe-pkgdata-util",
                   "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
                   globs]
            exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
            if exclude:
                cmd.extend(['--exclude=' + '|'.join(exclude.split())])
            try:
                bb.note('Running %s' % cmd)
                proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                stdout, stderr = proc.communicate()
                if stderr: bb.note(stderr.decode("utf-8"))
                complementary_pkgs = stdout.decode("utf-8")
                complementary_pkgs = set(complementary_pkgs.split())
                # don't install complementary packages that are already provided
                # by something installed
                skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
                install_pkgs = sorted(complementary_pkgs - provided_pkgs)
                bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
                    ' '.join(install_pkgs),
                    ' '.join(skip_pkgs)))
                self.install(install_pkgs, hard_depends_only=True)
            # NOTE(review): as in install_glob, Popen.communicate() does not
            # raise CalledProcessError, so this handler appears unreachable —
            # confirm before relying on the fatal path.
            except subprocess.CalledProcessError as e:
                bb.fatal("Could not compute complementary packages list. Command "
                         "'%s' returned %d:\n%s" %
                         (' '.join(cmd), e.returncode, e.output.decode("utf-8")))

        if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
            target_arch = self.d.getVar('TARGET_ARCH')
            localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
            if os.path.exists(localedir) and os.listdir(localedir):
                generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
                # And now delete the binary locales
                self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)

    def deploy_dir_lock(self):
        """Take the deploy-directory lock; raises if deploy_dir is unset."""
        if self.deploy_dir is None:
            raise RuntimeError("deploy_dir is not set!")

        lock_file_name = os.path.join(self.deploy_dir, "deploy.lock")

        self.deploy_lock = bb.utils.lockfile(lock_file_name)

    def deploy_dir_unlock(self):
        """Release the deploy-directory lock; no-op if it is not held."""
        if self.deploy_lock is None:
            return

        bb.utils.unlockfile(self.deploy_lock)

        self.deploy_lock = None

    def construct_uris(self, uris, base_paths):
        """
        Construct URIs based on the following pattern: uri/base_path where 'uri'
        and 'base_path' correspond to each element of the corresponding array
        argument leading to len(uris) x len(base_paths) elements on the returned
        array
        """
        def _append(arr1, arr2, sep='/'):
            # joins every arr1 element with every arr2 element, normalising
            # redundant separators; arr1 is returned as-is when arr2 is empty
            res = []
            narr1 = [a.rstrip(sep) for a in arr1]
            narr2 = [a.rstrip(sep).lstrip(sep) for a in arr2]
            for a1 in narr1:
                if arr2:
                    for a2 in narr2:
                        res.append("%s%s%s" % (a1, sep, a2))
                else:
                    res.append(a1)
            return res
        return _append(uris, base_paths)
451
def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies):
    """
    Go through our do_package_write_X dependencies and hardlink the packages we depend
    upon into the repo directory. This prevents us seeing other packages that may
    have been built that we don't depend upon and also packages for architectures we don't
    support.
    """
    import errno

    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    mytaskname = d.getVar("BB_RUNTASK")
    pn = d.getVar("PN")
    seendirs = set()
    multilibs = {}

    bb.utils.remove(subrepo_dir, recurse=True)
    bb.utils.mkdirhier(subrepo_dir)

    # Detect bitbake -b usage
    nodeps = d.getVar("BB_LIMITEDDEPS") or False
    if nodeps or not filterbydependencies:
        # No usable dependency data (or filtering disabled): expose every
        # architecture subdirectory of the deploy dir via symlinks instead.
        for arch in d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").split() + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").replace("-", "_").split():
            target = os.path.join(deploydir + "/" + arch)
            if os.path.exists(target):
                oe.path.symlink(target, subrepo_dir + "/" + arch, True)
        return

    # Locate our own task in BB_TASKDEPDATA to use as the walk start point
    start = None
    for dep in taskdepdata:
        data = taskdepdata[dep]
        if data[1] == mytaskname and data[0] == pn:
            start = dep
            break
    if start is None:
        bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
    pkgdeps = set()
    start = [start]
    seen = set(start)
    # Support direct dependencies (do_rootfs -> do_package_write_X)
    # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X)
    while start:
        # breadth-first frontier; renamed so it no longer shadows builtin next()
        next_deps = []
        for dep2 in start:
            for dep in taskdepdata[dep2][3]:
                if taskdepdata[dep][0] != pn:
                    if "do_" + taskname in dep:
                        pkgdeps.add(dep)
                elif dep not in seen:
                    next_deps.append(dep)
                    seen.add(dep)
        start = next_deps

    # Loop-invariant values, hoisted out of the per-manifest-line loop below
    deploydir = os.path.normpath(deploydir)
    prediff = bb.data.inherits_class('packagefeed-stability', d)

    for dep in pkgdeps:
        c = taskdepdata[dep][0]
        manifest, d2 = oe.sstatesig.find_sstate_manifest(c, taskdepdata[dep][2], taskname, d, multilibs)
        if not manifest:
            bb.fatal("No manifest generated from: %s in %s" % (c, taskdepdata[dep][2]))
        if not os.path.exists(manifest):
            continue
        with open(manifest, "r") as f:
            for l in f:
                l = l.strip()
                # map the deployed path to its location inside subrepo_dir
                if prediff:
                    dest = l.replace(deploydir + "-prediff", "")
                else:
                    dest = l.replace(deploydir, "")
                dest = subrepo_dir + dest
                if l.endswith("/"):
                    if dest not in seendirs:
                        bb.utils.mkdirhier(dest)
                        seendirs.add(dest)
                    continue
                # Try to hardlink the file, copy if that fails (e.g. when the
                # source lives on a different filesystem)
                destdir = os.path.dirname(dest)
                if destdir not in seendirs:
                    bb.utils.mkdirhier(destdir)
                    seendirs.add(destdir)
                try:
                    os.link(l, dest)
                except OSError as err:
                    if err.errno == errno.EXDEV:
                        bb.utils.copyfile(l, dest)
                    else:
                        raise
537
538
def generate_index_files(d):
    """Write package feed indexes for every enabled packaging backend.

    Only backends listed in PACKAGE_CLASSES whose deploy directory exists
    are indexed; a non-None result from an indexer aborts the build.
    """
    from oe.package_manager.rpm import RpmSubdirIndexer
    from oe.package_manager.ipk import OpkgIndexer
    from oe.package_manager.deb import DpkgIndexer

    # PACKAGE_CLASSES entries look like "package_rpm" -> backend key "rpm"
    indexer_map = {
        "rpm": (RpmSubdirIndexer, d.getVar('DEPLOY_DIR_RPM')),
        "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')),
        "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB'))
    }

    for pkg_class in d.getVar('PACKAGE_CLASSES').replace("package_", "").split():
        if pkg_class not in indexer_map:
            continue

        indexer_cls, deploy_dir = indexer_map[pkg_class]
        if not os.path.exists(deploy_dir):
            continue

        result = indexer_cls(d, deploy_dir).write_index()
        if result is not None:
            bb.fatal(result)
563