#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import bb
import collections
import json
import os
import oe.packagedata
import oe.path
import re
import shutil

from pathlib import Path


LIC_REGEX = re.compile(
    rb"^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$",
    re.MULTILINE,
)


def extract_licenses(filename):
    """
    Extract SPDX License identifiers from a file
    """
    try:
        with open(filename, "rb") as f:
            size = min(15000, os.stat(filename).st_size)
            txt = f.read(size)
            licenses = re.findall(LIC_REGEX, txt)
            if licenses:
                ascii_licenses = [lic.decode("ascii") for lic in licenses]
                return ascii_licenses
    except Exception as e:
        bb.warn(f"Exception reading {filename}: {e}")
    return []


def is_work_shared_spdx(d):
    return '/work-shared/' in d.getVar('S')


def load_spdx_license_data(d):
    with open(d.getVar("SPDX_LICENSES"), "r") as f:
        data = json.load(f)
        # Transform the license array to a dictionary keyed by license ID
        data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}

    return data


def process_sources(d):
    """
    Returns True if the sources for this recipe should be included in the SPDX
    or False if not
    """
    pn = d.getVar("PN")
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    # glibc-locale: the do_fetch, do_unpack and do_patch tasks have been
    # deleted, so avoid archiving source here.
    if pn.startswith("glibc-locale"):
        return False
    if d.getVar("PN") == "libtool-cross":
        return False
    if d.getVar("PN") == "libgcc-initial":
        return False
    if d.getVar("PN") == "shadow-sysroot":
        return False

    return True


Dep = collections.namedtuple("Dep", ["pn", "hashfn", "in_taskhash"])


def collect_direct_deps(d, dep_task):
    """
    Find the direct dependencies of the current task

    Returns the list of recipes that have a dep_task that the current task
    depends on
    """
    current_task = "do_" + d.getVar("BB_CURRENTTASK")
    pn = d.getVar("PN")

    taskdepdata = d.getVar("BB_TASKDEPDATA", False)

    for this_dep in taskdepdata.values():
        if this_dep[0] == pn and this_dep[1] == current_task:
            break
    else:
        bb.fatal(f"Unable to find {pn}:{current_task} in taskdepdata")

    deps = set()

    for dep_name in this_dep.deps:
        dep_data = taskdepdata[dep_name]
        if dep_data.taskname == dep_task and dep_data.pn != pn:
            deps.add((dep_data.pn, dep_data.hashfn, dep_name in this_dep.taskhash_deps))

    return sorted(deps)


def get_spdx_deps(d):
    """
    Reads the SPDX dependencies JSON file and returns the data
    """
    spdx_deps_file = Path(d.getVar("SPDXDEPS"))

    deps = []
    with spdx_deps_file.open("r") as f:
        for entry in json.load(f):
            deps.append(Dep(*entry))
    return deps


def collect_package_providers(d):
    """
    Returns a dictionary where each RPROVIDES is mapped to the package that
    provides it
    """
    deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))

    providers = {}

    deps = collect_direct_deps(d, "do_create_spdx")
    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))

    for dep_pn, dep_hashfn, _ in deps:
        localdata = d
        recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
        if not recipe_data:
            localdata = bb.data.createCopy(d)
            localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
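            # No pkgdata was found in the target PKGDATA_DIR; the dependency
            # is likely an SDK/nativesdk one, so retry against PKGDATA_DIR_SDK.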
"${PKGDATA_DIR_SDK}") 142 recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata) 143 144 for pkg in recipe_data.get("PACKAGES", "").split(): 145 pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata) 146 rprovides = set( 147 n 148 for n, _ in bb.utils.explode_dep_versions2( 149 pkg_data.get("RPROVIDES", "") 150 ).items() 151 ) 152 rprovides.add(pkg) 153 154 if "PKG" in pkg_data: 155 pkg = pkg_data["PKG"] 156 rprovides.add(pkg) 157 158 for r in rprovides: 159 providers[r] = (pkg, dep_hashfn) 160 161 return providers 162 163 164def get_patched_src(d): 165 """ 166 Save patched source of the recipe in SPDX_WORKDIR. 167 """ 168 spdx_workdir = d.getVar("SPDXWORK") 169 spdx_sysroot_native = d.getVar("STAGING_DIR_NATIVE") 170 pn = d.getVar("PN") 171 172 workdir = d.getVar("WORKDIR") 173 174 try: 175 # The kernel class functions require it to be on work-shared, so we dont change WORKDIR 176 if not is_work_shared_spdx(d): 177 # Change the WORKDIR to make do_unpack do_patch run in another dir. 178 d.setVar("WORKDIR", spdx_workdir) 179 # Restore the original path to recipe's native sysroot (it's relative to WORKDIR). 180 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) 181 182 # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the 183 # possibly requiring of the following tasks (such as some recipes's 184 # do_patch required 'B' existed). 185 bb.utils.mkdirhier(d.getVar("B")) 186 187 bb.build.exec_func("do_unpack", d) 188 189 if d.getVar("SRC_URI") != "": 190 bb.build.exec_func("do_patch", d) 191 192 # Copy source from work-share to spdx_workdir 193 if is_work_shared_spdx(d): 194 share_src = d.getVar('S') 195 d.setVar("WORKDIR", spdx_workdir) 196 d.setVar("STAGING_DIR_NATIVE", spdx_sysroot_native) 197 # Copy source to ${SPDXWORK}, same basename dir of ${S}; 198 src_dir = ( 199 spdx_workdir 200 + "/" 201 + os.path.basename(share_src) 202 ) 203 # For kernel souce, rename suffix dir 'kernel-source' 204 # to ${BP} (${BPN}-${PV}) 205 if bb.data.inherits_class("kernel", d): 206 src_dir = spdx_workdir + "/" + d.getVar('BP') 207 208 bb.note(f"copyhardlinktree {share_src} to {src_dir}") 209 oe.path.copyhardlinktree(share_src, src_dir) 210 211 # Some userland has no source. 212 if not os.path.exists(spdx_workdir): 213 bb.utils.mkdirhier(spdx_workdir) 214 finally: 215 d.setVar("WORKDIR", workdir) 216 217 218def has_task(d, task): 219 return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False)) 220 221 222def fetch_data_to_uri(fd, name): 223 """ 224 Translates a bitbake FetchData to a string URI 225 """ 226 uri = fd.type 227 # Map gitsm to git, since gitsm:// is not a valid URI protocol 228 if uri == "gitsm": 229 uri = "git" 230 proto = getattr(fd, "proto", None) 231 if proto is not None: 232 uri = uri + "+" + proto 233 uri = uri + "://" + fd.host + fd.path 234 235 if fd.method.supports_srcrev(): 236 uri = uri + "@" + fd.revisions[name] 237 238 return uri 239