xref: /openbmc/openbmc/poky/meta/classes-global/base.bbclass (revision c9537f57ab488bf5d90132917b0184e2527970a5)
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

# Task run by "bitbake <recipe>" when no -c is supplied.
BB_DEFAULT_TASK ?= "build"
# Which class variant (class-target/class-native/class-nativesdk) is in effect.
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit logging

# Arguments collected from PACKAGECONFIG, consumed by configure-time classes.
PACKAGECONFIG_CONFARGS ??= ""

inherit metadata_scm
21
def lsb_distro_identifier(d):
    """Return the host distro identifier string.

    If LSB_DISTRO_ADJUST names a function present in the metadata globals,
    it is passed to the identifier as a post-processing hook; an unknown
    name is silently ignored.
    """
    adjust_name = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = globals().get(adjust_name) if adjust_name else None
    return oe.lsb.distro_identifier(adjust_func)
31
# Abort the current task, logging the given message as a fatal error.
die() {
	bbfatal_log "$*"
}
35
# Run ${MAKE} with EXTRA_OEMAKE plus caller arguments, echoing the exact
# command line to the task log first.
oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}
40
# Wrapper around oe_runmake_call that turns a make failure into a fatal
# task error.
oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}
44
45
def get_base_dep(d):
    """Return the implicit toolchain dependencies for this recipe, or ""
    when the recipe sets INHIBIT_DEFAULT_DEPS to opt out of them."""
    inhibited = d.getVar('INHIBIT_DEFAULT_DEPS', False)
    return "" if inhibited else "${BASE_DEFAULT_DEPS}"
50
# Toolchain dependencies implicitly added to target/nativesdk recipes.
BASE_DEFAULT_DEPS = "virtual/cross-cc virtual/compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS:class-target = "${@get_base_dep(d)}"
BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"

DEPENDS:prepend = "${BASEDEPENDS} "
58
# Search path for file:// URIs: per-recipe dirs first, then shared "files".
FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
63
def extra_path_elements(d):
    """Build a PATH prefix ("<dir>:<dir>:...") with one native-sysroot bin
    directory per entry in EXTRANATIVEPATH; "" when the variable is unset."""
    entries = (d.getVar('EXTRANATIVEPATH') or "").split()
    return "".join("${STAGING_BINDIR_NATIVE}/" + entry + ":" for entry in entries)
70
# Put any EXTRANATIVEPATH tool directories ahead of the normal PATH.
PATH:prepend = "${@extra_path_elements(d)}"
72
def get_lic_checksum_file_list(d):
    """Return space-separated "path:<exists>" entries for every absolute
    license file referenced by LIC_FILES_CHKSUM.

    Paths inside the build tree (TMPDIR/S/B/WORKDIR) are skipped — changes
    there are tracked elsewhere — and relative file:// paths are covered by
    SRC_URI checksums.  A malformed URL is a fatal recipe error.
    """
    entries = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    build_dirs = (d.getVar("TMPDIR"), d.getVar("S"),
                  d.getVar("B"), d.getVar("WORKDIR"))

    for url in lic_files.split():
        try:
            method, _host, path, _user, _pswd, _parm = bb.fetch.decodeurl(url)
            # Only file:// URLs with a path make sense here.
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)
            if path[0] == '/' and not path.startswith(build_dirs):
                entries.append("%s:%s" % (path, os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(entries)
97
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    """Populate dest with symlinks to the host tools named in toolsvar.

    Tools are resolved against the PATH captured in BB_ORIGENV (the
    environment bitbake was originally started from).  When fatal is True,
    any missing tool aborts the build.
    """
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    # Need to ignore our own scripts directories to avoid circular links
    for p in path.split(":"):
        if p.endswith("/scripts"):
            path = path.replace(p, "/ignoreme")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.,
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
            # would return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc, this code can check and fix that.
            if os.path.islink(srctool) and os.path.basename(os.readlink(srctool)) == 'ccache':
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))
128
# We can't use vardepvalue against do_fetch directly since that would overwrite
# the other task dependencies so we use an indirect function.
python fetcher_hashes_dummyfunc() {
    return
}
# The no-op's signature carries the fetcher hash state into do_fetch.
fetcher_hashes_dummyfunc[vardepvalue] = "${@bb.fetch.get_hashvalue(d)}"
135
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
# Re-run do_fetch when any checksummed source or license file changes.
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
# Fetching is the one task that is allowed network access.
do_fetch[network] = "1"
python base_do_fetch() {
    # Download everything in SRC_URI; a recipe without sources is a no-op.
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        bb.fetch2.Fetch(src_uri, d).download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}
154
addtask unpack after do_fetch
# Start every unpack from an empty UNPACKDIR.
do_unpack[cleandirs] = "${UNPACKDIR}"
157
python base_do_unpack() {
    import shutil

    sourcedir = d.getVar('S')
    # Intentionally keep SOURCE_BASEDIR internal to the task just for SDE
    d.setVar("SOURCE_BASEDIR", sourcedir)

    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    basedir = None
    unpackdir = d.getVar('UNPACKDIR')
    workdir = d.getVar('WORKDIR')
    if sourcedir.startswith(workdir) and not sourcedir.startswith(unpackdir):
        # S points under WORKDIR but outside UNPACKDIR: remember and wipe
        # the first path component under WORKDIR so it can be recreated
        # from the freshly unpacked sources below.
        basedir = sourcedir.replace(workdir, '').strip("/").split('/')[0]
        if basedir:
            bb.utils.remove(workdir + '/' + basedir, True)
            d.setVar("SOURCE_BASEDIR", workdir + '/' + basedir)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('UNPACKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))

    if basedir and os.path.exists(unpackdir + '/' + basedir):
        # Compatibility magic to ensure ${WORKDIR}/git and ${WORKDIR}/${BP}
        # as often used in S work as expected.
        shutil.move(unpackdir + '/' + basedir, workdir + '/' + basedir)
}
189
SSTATETASKS += "do_deploy_source_date_epoch"

# Copy the recipe's SOURCE_DATE_EPOCH stamp into the sstate deploy area.
do_deploy_source_date_epoch () {
    mkdir -p ${SDE_DEPLOYDIR}
    if [ -e ${SDE_FILE} ]; then
        echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
        cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
    else
        echo "${SDE_FILE} not found!"
    fi
}
201
# Restore the SOURCE_DATE_EPOCH stamp from sstate back into SDE_FILE.
python do_deploy_source_date_epoch_setscene () {
    sstate_setscene(d)
    bb.utils.mkdirhier(d.getVar('SDE_DIR'))
    sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
    if os.path.exists(sde_file):
        target = d.getVar('SDE_FILE')
        bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
        bb.utils.rename(sde_file, target)
    else:
        bb.debug(1, "%s not found!" % sde_file)
}
213
do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
addtask do_deploy_source_date_epoch_setscene
# The stamp must exist before configure so later tasks see a stable epoch.
addtask do_deploy_source_date_epoch before do_configure after do_patch
218
# Derive SOURCE_DATE_EPOCH from the unpacked sources (SOURCE_BASEDIR when
# do_unpack set it, otherwise S) and record it in SDE_FILE.
python create_source_date_epoch_stamp() {
    # Version: 2
    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('SOURCE_BASEDIR') or d.getVar('S'))
    oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
}
do_unpack[postfuncs] += "create_source_date_epoch_stamp"
225
def get_source_date_epoch_value(d):
    """Read back the SOURCE_DATE_EPOCH value recorded in SDE_FILE."""
    return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)
228
def get_layers_branch_rev(d):
    """Return '<layer>             = "<branch>:<rev>"' banner lines, one per
    configured layer.

    Runs of consecutive lines sharing the same branch:revision value have
    the value blanked on all but the last line, so the banner prints each
    repeated value only once.  Returns [] when there are no layers (the
    previous implementation raised IndexError on an empty revision list).
    """
    revisions = oe.buildcfg.get_layer_revisions(d)
    lines = ['%-20s = "%s:%s"' % (r[1], r[2], r[3]) for r in revisions]
    # Compare the original '= "branch:rev"' suffixes of neighbouring lines
    # bottom-up; blank the upper line's value when they match.  Using the
    # pre-computed suffixes preserves the old behaviour of comparing
    # against the not-yet-blanked text.
    suffixes = [line[line.find("="):] for line in lines]
    for i in range(len(lines) - 1, 0, -1):
        if suffixes[i - 1] == suffixes[i]:
            lines[i - 1] = lines[i - 1][:lines[i - 1].find("=")]
    return lines
246
247
# Functions whose output forms the build configuration banner.
BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"
250
def buildcfg_vars(d):
    """Yield 'NAME                 = "value"' banner lines for every
    BUILDCFG_VARS entry that has a value set."""
    for name in oe.data.typed_value('BUILDCFG_VARS', d):
        val = d.getVar(name)
        if val is not None:
            yield '%-20s = "%s"' % (name, val)
257
def buildcfg_neededvars(d):
    """Abort the build if any BUILDCFG_NEEDEDVARS entry is unset or still
    has the placeholder value 'INVALID'."""
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    missing = [v for v in needed_vars
               if not d.getVar(v) or d.getVar(v) == 'INVALID']

    if missing:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(missing))
268
# Register the base event handler for the events it needs to observe.
addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
# Central event handler: caches host distro info, sets up HOSTTOOLS_DIR,
# merges multiconfig signature exclusions, prints the build banner and
# skips non-preferred virtual/* providers.
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        # Cache the host distro ID once; ORIGNATIVELSBSTRING preserves the
        # initial value even if NATIVELSBSTRING is later overridden.
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        # Assemble and print the build configuration banner from the
        # output of every function listed in BUILDCFG_FUNCS.
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = bb.utils.to_boolean(d.getVar('SOURCE_MIRROR_FETCH', False))
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiprovidersallowed:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}
333
# Stamp recording the task hash of the last successful configure.
CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
# Set to "1" by recipes whose "make clean" is known to be broken.
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
# Default configure: when the task hash changed since the last run, clean
# stale build artefacts first, then record the new hash in the stamp file.
base_do_configure() {
	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
			cd ${B}
			if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
				oe_runmake clean
			fi
			# -ignore_readdir_race does not work correctly with -delete;
			# use xargs to avoid spurious build failures
			find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
		fi
	fi
	if [ -n "${CONFIGURESTAMPFILE}" ]; then
		mkdir -p `dirname ${CONFIGURESTAMPFILE}`
		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
	fi
}
356
addtask compile after do_configure
do_compile[dirs] = "${B}"
# Default compile: run make when a makefile is present, otherwise no-op.
base_do_compile() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake || die "make failed"
	else
		bbnote "nothing to compile"
	fi
}
366
addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

# Default install is a no-op; recipes and classes override it.
base_do_install() {
	:
}
375
addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
# Marker task; exists only so that dependencies can hang off it.
do_build () {
	:
}
382
def set_packagetriplet(d):
    """Set PKGTRIPLETS and PKGMLTRIPLETS to "<arch><vendor>-<os>" triplet
    lists for the base configuration and every multilib variant."""
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    # Combine each arch list with its matching OS/vendor into triplets,
    # reversed so the most specific arch comes first.
    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        # Re-evaluate the target configuration under each multilib override
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
413
# Anonymous parse-time function, run once per recipe after parsing:
# applies PACKAGECONFIG, license checks, fakeroot setup, COMPATIBLE_*
# filtering, fetch/unpack tool dependencies and PACKAGE_ARCH adjustment.
python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    # To add a recipe to the skip list , set:
    #   SKIP_RECIPE[pn] = "message"
    pn = d.getVar('PN')
    skip_msg = d.getVarFlag('SKIP_RECIPE', pn)
    if skip_msg:
        bb.debug(1, "Skipping %s %s" % (pn, skip_msg))
        raise bb.parse.SkipRecipe("Recipe will be skipped because: %s" % (skip_msg))

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        # Rewrite dependency names for the recipe variant: nativesdk-,
        # -native or multilib-prefixed as appropriate.
        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        # Append collected entries to varname, filtering dependency-type
        # variables through expandFilter for the current class variant.
        def appendVar(varname, appends):
            if not appends:
                return
            if "DEPENDS" in varname or varname.startswith("RRECOMMENDS"):
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 6:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                    extraconf.append(items[1])

            # Validate and enforce the [conflict_packageconfig] sixth field.
            if num >= 6 and items[5]:
                conflicts = set(items[5].split())
                invalid = conflicts.difference(set(pkgconfigflags.keys()))
                if invalid:
                    bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
                        % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))

                if flag in pkgconfig:
                    intersec = conflicts.intersection(set(pkgconfig))
                    if intersec:
                        bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
                            % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))

        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS:${PN}', extrardeps)
        appendVar('RRECOMMENDS:${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        oe.license.check_license_format(d)
        unmatched_license_flags = oe.license.check_license_flags(d)
        if unmatched_license_flags:
            for unmatched in unmatched_license_flags:
                message = "Has a restricted license '%s' which is not listed in your LICENSE_FLAGS_ACCEPTED." % unmatched
                details = d.getVarFlag("LICENSE_FLAGS_DETAILS", unmatched)
                if details:
                    message += "\n" + details
            bb.debug(1, "Skipping %s: %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine and not bb.utils.to_boolean(d.getVar('PARSE_ALL_RECIPES', False)):
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = bb.utils.to_boolean(d.getVar('SOURCE_MIRROR_FETCH', False)) or \
            bb.utils.to_boolean(d.getVar('PARSE_ALL_RECIPES', False))
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        pkgs = d.getVar('PACKAGES').split()
        if pkgs:
            skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, pkgs)
            unskipped_pkgs = [p for p in pkgs if p not in skipped_pkgs]

            if unskipped_pkgs:
                for pkg in skipped_pkgs:
                    bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
                for pkg in unskipped_pkgs:
                    bb.debug(1, "Including the package %s" % pkg)
            else:
                # Every package is license-skipped: skip the whole recipe.
                incompatible_lic = oe.license.incompatible_license(d, bad_licenses)
                for pkg in skipped_pkgs:
                    incompatible_lic += skipped_pkgs[pkg]
                incompatible_lic = sorted(list(set(incompatible_lic)))

                if incompatible_lic:
                    bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
                    raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    srcuri = d.getVar('SRC_URI')
    for uri_string in srcuri.split():
        uri = bb.fetch.URI(uri_string)
        # Also check downloadfilename as the URL path might not be useful for sniffing
        path = uri.params.get("downloadfilename", uri.path)

        # HTTP/FTP use the wget fetcher
        if uri.scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if uri.scheme == "svn":
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif uri.scheme in ("git", "gitsm"):
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif uri.scheme == "hg":
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot ca-certificates-native:do_populate_sysroot')

        # OSC packages should DEPEND on osc-native
        elif uri.scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif uri.scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        elif uri.scheme == "repo":
            d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.zst should DEPEND on zstd-native for unpacking
        elif path.endswith('.zst'):
            d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.7z should DEPEND on 7zip-native for unpacking
        elif path.endswith('.7z'):
            d.appendVarFlag('do_unpack', 'depends', ' 7zip-native:do_populate_sysroot')

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if paths:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}
698
addtask cleansstate after do_clean
# Remove this recipe's shared-state cache files.
python do_cleansstate() {
        sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"
705
# Remove all downloaded sources for this recipe via the fetcher.
python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"
718
719
# Export the base_* implementations as the default do_* task bodies,
# overridable by recipes and other classes.
EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install
721