xref: /openbmc/openbmc/poky/bitbake/lib/bb/fetch2/npmsw.py (revision 96e4b4e121e0e2da1535d7d537d6a982a6ff5bc0)
1# Copyright (C) 2020 Savoir-Faire Linux
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5"""
6BitBake 'Fetch' npm shrinkwrap implementation
7
The npm shrinkwrap fetcher supports SRC_URI with the following format:
9SRC_URI = "npmsw://some.registry.url;OptionA=xxx;OptionB=xxx;..."
10
11Supported SRC_URI options are:
12
- dev
    Set to 1 to also fetch devDependencies.
15
16- destsuffix
17    Specifies the directory to use to unpack the dependencies (default: ${S}).
18"""
19
import json
import os
import re

import bb
from bb.fetch2 import Fetch
from bb.fetch2 import FetchError
from bb.fetch2 import FetchMethod
from bb.fetch2 import ParameterError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import URI
from bb.fetch2.npm import npm_integrity
from bb.fetch2.npm import npm_localfile
from bb.fetch2.npm import npm_unpack
from bb.utils import is_semver
from bb.utils import lockfile
from bb.utils import unlockfile
35
def foreach_dependencies(shrinkwrap, callback=None, dev=False):
    """
        Run a callback for each dependencies of a shrinkwrap file.
        The callback is using the format:
            callback(name, data, location)
        with:
            name = the package name (string)
            data = the package data (dictionary)
            location = the location of the package (string)

        Only entries under "node_modules/" are reported; the root package,
        bundled dependencies and (unless dev is True) development
        dependencies are skipped.

        Raises a FetchError when the shrinkwrap data has no "packages"
        section (e.g. an unsupported lockfile version).
    """
    packages = shrinkwrap.get("packages")
    if not packages:
        # Requires the bb.fetch2.FetchError import at the top of this file.
        raise FetchError("Invalid shrinkwrap file format")

    for location, data in packages.items():
        # Skip empty main and local link target packages
        if not location.startswith('node_modules/'):
            continue
        # Skip development dependencies unless explicitly requested
        elif not dev and data.get("dev", False):
            continue
        # Bundled dependencies are shipped inside their parent's tarball,
        # so they must not be fetched separately
        elif data.get("inBundle", False):
            continue
        # The package name is everything after the last "node_modules/"
        # segment, which handles nested dependencies and scoped names
        name = location.split('node_modules/')[-1]
        callback(name, data, location)
60
class NpmShrinkWrap(FetchMethod):
    """Class to fetch all package from a shrinkwrap file"""

    def supports(self, ud, d):
        """Check if a given url can be fetched with npmsw"""
        return ud.type in ["npmsw"]

    def urldata_init(self, ud, d):
        """Init npmsw specific variables within url data"""

        # Get the 'shrinkwrap' parameter
        # The shrinkwrap file path is everything between "npmsw://" and the
        # first ";" (the remaining ";key=value" pairs are URI options).
        ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])

        # Get the 'dev' parameter
        ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)

        # Resolve the dependencies
        ud.deps = []

        def _resolve_dependency(name, params, destsuffix):
            # Translate one shrinkwrap entry into a descriptor stored in
            # ud.deps. Depending on how the entry is "resolved", it is
            # either forwarded to the proxy fetcher (url is set) or handled
            # locally at unpack time (localpath is set); registry entries
            # set both (downloaded by the proxy, unpacked manually).
            url = None
            localpath = None
            extrapaths = []
            unpack = True

            integrity = params.get("integrity")
            resolved = params.get("resolved")
            version = params.get("version")
            link = params.get("link", False)

            # Handle link sources
            # A "link" entry points at a local directory: nothing to
            # download, and the directory is copied instead of unpacked.
            if link:
                localpath = resolved
                unpack = False

            # Handle registry sources
            elif version and is_semver(version) and integrity:
                # Handle duplicate dependencies without url
                if not resolved:
                    return

                localfile = npm_localfile(name, version)

                uri = URI(resolved)
                uri.params["downloadfilename"] = localfile

                # Attach the expected checksum so the proxy fetcher can
                # verify the downloaded tarball.
                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

                # Create a resolve file to mimic the npm fetcher and allow
                # re-usability of the downloaded file.
                resolvefile = localpath + ".resolved"

                bb.utils.mkdirhier(os.path.dirname(resolvefile))
                with open(resolvefile, "w") as f:
                    f.write(url)

                extrapaths.append(resolvefile)

            # Handle http tarball sources
            elif resolved.startswith("http") and integrity:
                localfile = npm_localfile(os.path.basename(resolved))

                uri = URI(resolved)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

            # Handle local tarball sources
            elif resolved.startswith("file"):
                # Strip the "file:" scheme prefix to get the local path.
                localpath = resolved[5:]

            # Handle git sources
            elif resolved.startswith("git"):
                # Expected form: git+<protocol>://<url>#<rev>
                regex = re.compile(r"""
                    ^
                    git\+
                    (?P<protocol>[a-z]+)
                    ://
                    (?P<url>[^#]+)
                    \#
                    (?P<rev>[0-9a-f]+)
                    $
                    """, re.VERBOSE)

                match = regex.match(resolved)
                if not match:
                    raise ParameterError("Invalid git url: %s" % resolved, ud.url)

                groups = match.groupdict()

                uri = URI("git://" + str(groups["url"]))
                uri.params["protocol"] = str(groups["protocol"])
                uri.params["rev"] = str(groups["rev"])
                uri.params["nobranch"] = "1"
                uri.params["destsuffix"] = destsuffix

                url = str(uri)

            else:
                raise ParameterError("Unsupported dependency: %s" % name, ud.url)

            # name is needed by unpack tracer for module mapping
            ud.deps.append({
                "name": name,
                "url": url,
                "localpath": localpath,
                "extrapaths": extrapaths,
                "destsuffix": destsuffix,
                "unpack": unpack,
            })

        try:
            with open(ud.shrinkwrap_file, "r") as f:
                shrinkwrap = json.load(f)
        except Exception as e:
            raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)

        foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)

        # Avoid conflicts between the environment data and:
        # - the proxy url revision
        # - the proxy url checksum
        data = bb.data.createCopy(d)
        data.delVar("SRCREV")
        data.delVarFlags("SRC_URI")

        # This fetcher resolves multiple URIs from a shrinkwrap file and then
        # forwards it to a proxy fetcher. The management of the donestamp file,
        # the lockfile and the checksums are forwarded to the proxy fetcher.
        # NOTE(review): ud.proxy is only created when at least one dependency
        # has a url; download()/clean() below access ud.proxy unconditionally,
        # presumably assuming this is the common case — confirm for
        # shrinkwraps containing only link/file dependencies.
        shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
        if shrinkwrap_urls:
            ud.proxy = Fetch(shrinkwrap_urls, data)
        ud.needdonestamp = False

    @staticmethod
    def _foreach_proxy_method(ud, handle):
        """Run handle(method, urldata, datastore) for each proxied url and
        return the list of per-url results."""
        returns = []
        #Check if there are dependencies before try to fetch them
        if len(ud.deps) > 0:
            for proxy_url in ud.proxy.urls:
                proxy_ud = ud.proxy.ud[proxy_url]
                proxy_d = ud.proxy.d
                proxy_ud.setup_localpath(proxy_d)
                # Serialize access to the proxied download file while the
                # handler runs.
                lf = lockfile(proxy_ud.lockfile)
                returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
                unlockfile(lf)
        return returns

    def verify_donestamp(self, ud, d):
        """Verify the donestamp file"""
        # Done only when every proxied url's donestamp verifies.
        def _handle(m, ud, d):
            return m.verify_donestamp(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def update_donestamp(self, ud, d):
        """Update the donestamp file"""
        def _handle(m, ud, d):
            m.update_donestamp(ud, d)
        self._foreach_proxy_method(ud, _handle)

    def need_update(self, ud, d):
        """Force a fetch, even if localpath exists ?"""
        # NOTE(review): all() means an update is reported only when every
        # proxied url needs one — confirm this matches the intended
        # "any needs update" semantics.
        def _handle(m, ud, d):
            return m.need_update(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def try_mirrors(self, fetch, ud, d, mirrors):
        """Try to use a mirror"""
        def _handle(m, ud, d):
            return m.try_mirrors(fetch, ud, d, mirrors)
        return all(self._foreach_proxy_method(ud, _handle))

    def download(self, ud, d):
        """Fetch url"""
        # Delegate the actual downloads to the proxy fetcher built in
        # urldata_init().
        ud.proxy.download()

    def unpack(self, ud, rootdir, d):
        """Unpack the downloaded dependencies"""
        destdir = rootdir
        destsuffix = ud.parm.get("destsuffix")
        if destsuffix:
            destdir = os.path.join(rootdir, destsuffix)
        ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)

        # Ship the shrinkwrap file with the unpacked tree so npm can
        # reproduce the exact dependency versions offline.
        bb.utils.mkdirhier(destdir)
        bb.utils.copyfile(ud.shrinkwrap_file,
                          os.path.join(destdir, "npm-shrinkwrap.json"))

        # "auto" deps (url, no localpath — e.g. git) are unpacked by the
        # proxy fetcher; "manual" deps (with a localpath) are handled here.
        auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
        manual = [dep for dep in ud.deps if dep["localpath"]]

        if auto:
            ud.proxy.unpack(destdir, auto)

        for dep in manual:
            depdestdir = os.path.join(destdir, dep["destsuffix"])
            if dep["url"]:
                # Registry/http tarball previously downloaded to DL_DIR.
                npm_unpack(dep["localpath"], depdestdir, d)
            else:
                # Local file or link source: path is relative to destdir.
                depsrcdir= os.path.join(destdir, dep["localpath"])
                if dep["unpack"]:
                    npm_unpack(depsrcdir, depdestdir, d)
                else:
                    # Link dependency: copy the directory contents,
                    # preserving permissions and hard links.
                    bb.utils.mkdirhier(depdestdir)
                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
                    runfetchcmd(cmd, d, workdir=depdestdir)

    def clean(self, ud, d):
        """Clean any existing full or partial download"""
        ud.proxy.clean()

        # Clean extra files
        # e.g. the ".resolved" files created for registry dependencies.
        for dep in ud.deps:
            for path in dep["extrapaths"]:
                bb.utils.remove(path)

    def done(self, ud, d):
        """Is the download done ?"""
        # Done only when every proxied url reports done.
        def _handle(m, ud, d):
            return m.done(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))
292