xref: /openbmc/openbmc/poky/bitbake/lib/bb/fetch2/npmsw.py (revision ac13d5f3)
1# Copyright (C) 2020 Savoir-Faire Linux
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5"""
6BitBake 'Fetch' npm shrinkwrap implementation
7
The npm fetcher supports SRC_URI with the following format:
9SRC_URI = "npmsw://some.registry.url;OptionA=xxx;OptionB=xxx;..."
10
11Supported SRC_URI options are:
12
13- dev
14   Set to 1 to also install devDependencies.
15
16- destsuffix
17    Specifies the directory to use to unpack the dependencies (default: ${S}).
18"""
19
20import json
21import os
22import re
23import bb
24from bb.fetch2 import Fetch
25from bb.fetch2 import FetchMethod
26from bb.fetch2 import ParameterError
27from bb.fetch2 import runfetchcmd
28from bb.fetch2 import URI
29from bb.fetch2.npm import npm_integrity
30from bb.fetch2.npm import npm_localfile
31from bb.fetch2.npm import npm_unpack
32from bb.utils import is_semver
33from bb.utils import lockfile
34from bb.utils import unlockfile
35
def foreach_dependencies(shrinkwrap, callback=None, dev=False):
    """
        Run a callback for each dependency of a shrinkwrap file.
        The callback is using the format:
            callback(name, params, destsuffix)
        with:
            name = the package name (string)
            params = the package parameters (dictionary)
            destsuffix = the destination of the package (string),
                         e.g. "node_modules/a/node_modules/b"
        If dev is False, devDependencies are skipped.
    """
    # For handling old style "dependencies" entries in shrinkwrap files
    def _walk_deps(deps, deptree):
        for name in deps:
            subtree = [*deptree, name]
            # Recurse first so nested dependencies are visited as well
            _walk_deps(deps[name].get("dependencies", {}), subtree)
            if callback is not None:
                # Skip devDependencies unless explicitly requested
                if deps[name].get("dev", False) and not dev:
                    continue
                # Bundled packages are shipped inside their parent's tarball,
                # so they must not be fetched separately
                elif deps[name].get("bundled", False):
                    continue
                destsubdirs = [os.path.join("node_modules", dep) for dep in subtree]
                destsuffix = os.path.join(*destsubdirs)
                callback(name, deps[name], destsuffix)

    # A "packages" entry means new style shrinkwrap file, else use "dependencies"
    packages = shrinkwrap.get("packages", None)
    if packages is not None:
        for package in packages:
            # The "" key describes the project itself, not a dependency
            if package != "":
                # Keys look like "node_modules/a/node_modules/b"; the real
                # package name is the part after the last "node_modules/"
                name = package.split('node_modules/')[-1]
                package_infos = packages.get(package, {})
                if not dev and package_infos.get("dev", False):
                    continue
                callback(name, package_infos, package)
    else:
        _walk_deps(shrinkwrap.get("dependencies", {}), [])
72
class NpmShrinkWrap(FetchMethod):
    """Class to fetch all package from a shrinkwrap file"""

    def supports(self, ud, d):
        """Check if a given url can be fetched with npmsw"""
        return ud.type in ["npmsw"]

    def urldata_init(self, ud, d):
        """Init npmsw specific variables within url data

        Parses the shrinkwrap file referenced by the URL, resolves every
        dependency into a fetchable URI (registry tarball, http tarball,
        local file or git repository) stored in ud.deps, and builds a proxy
        Fetch object for all dependencies that resolved to a URL.

        Raises ParameterError if the shrinkwrap file cannot be parsed or a
        dependency cannot be resolved.
        """

        # Get the 'shrinkwrap' parameter: the URL path with the scheme and
        # the ';'-separated options stripped is the shrinkwrap file path
        ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])

        # Get the 'dev' parameter (install devDependencies when true)
        ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)

        # Resolve the dependencies: each entry is a dict with keys
        # name/url/localpath/extrapaths/destsuffix/unpack (see append below)
        ud.deps = []

        def _resolve_dependency(name, params, destsuffix):
            # Callback for foreach_dependencies(); classifies one dependency
            # and appends its fetch description to ud.deps.
            url = None
            localpath = None
            extrapaths = []
            unpack = True

            integrity = params.get("integrity", None)
            resolved = params.get("resolved", None)
            version = params.get("version", None)

            # Handle registry sources: a plain semver version plus an
            # integrity hash means a registry tarball
            if is_semver(version) and integrity:
                # Handle duplicate dependencies without url
                if not resolved:
                    return

                localfile = npm_localfile(name, version)

                # Fetch via the "resolved" URL, renaming the download to the
                # npm fetcher's canonical local file name
                uri = URI(resolved)
                uri.params["downloadfilename"] = localfile

                # Convert the SRI integrity string into a bitbake checksum
                # parameter (e.g. sha512sum=...)
                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

                # Create a resolve file to mimic the npm fetcher and allow
                # re-usability of the downloaded file.
                resolvefile = localpath + ".resolved"

                bb.utils.mkdirhier(os.path.dirname(resolvefile))
                with open(resolvefile, "w") as f:
                    f.write(url)

                # Track the resolve file so clean() can remove it
                extrapaths.append(resolvefile)

            # Handle http tarball sources: "version" is itself the URL
            elif version.startswith("http") and integrity:
                localfile = npm_localfile(os.path.basename(version))

                uri = URI(version)
                uri.params["downloadfilename"] = localfile

                checksum_name, checksum_expected = npm_integrity(integrity)
                uri.params[checksum_name] = checksum_expected

                url = str(uri)

                localpath = os.path.join(d.getVar("DL_DIR"), localfile)

            # Handle local tarball and link sources
            elif version.startswith("file"):
                # Strip the "file:" prefix (5 characters); no url is set, so
                # unpack() treats this as a purely local dependency
                localpath = version[5:]
                if not version.endswith(".tgz"):
                    # A directory link: copied verbatim instead of unpacked
                    unpack = False

            # Handle git sources: explicit git/bitbucket/gist prefixes, or
            # the npm "user/repo" shorthand (has a "/", is not a tarball
            # name and does not start with ./ @ or /)
            elif version.startswith(("git", "bitbucket","gist")) or (
                not version.endswith((".tgz", ".tar", ".tar.gz"))
                and not version.startswith((".", "@", "/"))
                and "/" in version
            ):
                # Normalize the npm host shorthands to full git+https URLs
                if version.startswith("github:"):
                    version = "git+https://github.com/" + version[len("github:"):]
                elif version.startswith("gist:"):
                    version = "git+https://gist.github.com/" + version[len("gist:"):]
                elif version.startswith("bitbucket:"):
                    version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
                elif version.startswith("gitlab:"):
                    version = "git+https://gitlab.com/" + version[len("gitlab:"):]
                elif not version.startswith(("git+","git:")):
                    # Bare "user/repo" shorthand defaults to GitHub
                    version = "git+https://github.com/" + version
                # Expect git+<protocol>://<url>#<hex revision>
                regex = re.compile(r"""
                    ^
                    git\+
                    (?P<protocol>[a-z]+)
                    ://
                    (?P<url>[^#]+)
                    \#
                    (?P<rev>[0-9a-f]+)
                    $
                    """, re.VERBOSE)

                match = regex.match(version)

                if not match:
                    raise ParameterError("Invalid git url: %s" % version, ud.url)

                groups = match.groupdict()

                # Rebuild as a bitbake git:// URI with explicit protocol,
                # pinned revision and unpack destination
                uri = URI("git://" + str(groups["url"]))
                uri.params["protocol"] = str(groups["protocol"])
                uri.params["rev"] = str(groups["rev"])
                uri.params["destsuffix"] = destsuffix

                url = str(uri)

            else:
                raise ParameterError("Unsupported dependency: %s" % name, ud.url)

            # name is needed by unpack tracer for module mapping
            ud.deps.append({
                "name": name,
                "url": url,
                "localpath": localpath,
                "extrapaths": extrapaths,
                "destsuffix": destsuffix,
                "unpack": unpack,
            })

        try:
            with open(ud.shrinkwrap_file, "r") as f:
                shrinkwrap = json.load(f)
        except Exception as e:
            raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)

        foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)

        # Avoid conflicts between the environment data and:
        # - the proxy url revision
        # - the proxy url checksum
        data = bb.data.createCopy(d)
        data.delVar("SRCREV")
        data.delVarFlags("SRC_URI")

        # This fetcher resolves multiple URIs from a shrinkwrap file and then
        # forwards it to a proxy fetcher. The management of the donestamp file,
        # the lockfile and the checksums are forwarded to the proxy fetcher.
        # NOTE(review): ud.proxy is only created when at least one dependency
        # resolved to a url, but download()/clean()/_foreach_proxy_method()
        # below dereference ud.proxy unconditionally — confirm a shrinkwrap
        # with only local "file:" dependencies cannot reach those paths.
        shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
        if shrinkwrap_urls:
            ud.proxy = Fetch(shrinkwrap_urls, data)
        ud.needdonestamp = False

    @staticmethod
    def _foreach_proxy_method(ud, handle):
        # Apply handle(method, urldata, datastore) to every proxied URL,
        # holding the per-URL lockfile around each call; returns the list
        # of handle() results.
        returns = []
        #Check if there are dependencies before try to fetch them
        if len(ud.deps) > 0:
            for proxy_url in ud.proxy.urls:
                proxy_ud = ud.proxy.ud[proxy_url]
                proxy_d = ud.proxy.d
                proxy_ud.setup_localpath(proxy_d)
                # NOTE(review): unlockfile is not in a finally block — an
                # exception raised by handle() leaves the lock held.
                lf = lockfile(proxy_ud.lockfile)
                returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
                unlockfile(lf)
        return returns

    def verify_donestamp(self, ud, d):
        """Verify the donestamp file"""
        # True only if every proxied URL's donestamp verifies
        def _handle(m, ud, d):
            return m.verify_donestamp(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def update_donestamp(self, ud, d):
        """Update the donestamp file"""
        def _handle(m, ud, d):
            m.update_donestamp(ud, d)
        self._foreach_proxy_method(ud, _handle)

    def need_update(self, ud, d):
        """Force a fetch, even if localpath exists ?"""
        # NOTE(review): all() means an update is forced only when every
        # proxied URL needs one — confirm any() is not the intended semantic.
        def _handle(m, ud, d):
            return m.need_update(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))

    def try_mirrors(self, fetch, ud, d, mirrors):
        """Try to use a mirror"""
        def _handle(m, ud, d):
            return m.try_mirrors(fetch, ud, d, mirrors)
        return all(self._foreach_proxy_method(ud, _handle))

    def download(self, ud, d):
        """Fetch url"""
        # Delegate the actual downloads to the proxy fetcher
        ud.proxy.download()

    def unpack(self, ud, rootdir, d):
        """Unpack the downloaded dependencies

        Copies the shrinkwrap file into the destination, unpacks all
        remotely-fetched dependencies via the proxy fetcher, then handles
        local tarballs (unpacked) and local directories (copied).
        """
        destdir = d.getVar("S")
        destsuffix = ud.parm.get("destsuffix")
        if destsuffix:
            destdir = os.path.join(rootdir, destsuffix)
        ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)

        bb.utils.mkdirhier(destdir)
        bb.utils.copyfile(ud.shrinkwrap_file,
                          os.path.join(destdir, "npm-shrinkwrap.json"))

        # Remote dependencies (no pre-existing localpath) are unpacked by the
        # proxy fetcher; the rest are handled manually below
        auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
        manual = [dep for dep in ud.deps if dep["localpath"]]

        if auto:
            ud.proxy.unpack(destdir, auto)

        for dep in manual:
            depdestdir = os.path.join(destdir, dep["destsuffix"])
            if dep["url"]:
                # Registry/http tarball already downloaded to localpath
                npm_unpack(dep["localpath"], depdestdir, d)
            else:
                # Local "file:" dependency, relative to the destination
                depsrcdir= os.path.join(destdir, dep["localpath"])
                if dep["unpack"]:
                    npm_unpack(depsrcdir, depdestdir, d)
                else:
                    # Directory link: copy preserving attributes/symlinks
                    bb.utils.mkdirhier(depdestdir)
                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
                    runfetchcmd(cmd, d, workdir=depdestdir)

    def clean(self, ud, d):
        """Clean any existing full or partial download"""
        ud.proxy.clean()

        # Clean extra files (the ".resolved" files written by urldata_init)
        for dep in ud.deps:
            for path in dep["extrapaths"]:
                bb.utils.remove(path)

    def done(self, ud, d):
        """Is the download done ?"""
        # Done only when every proxied URL reports done
        def _handle(m, ud, d):
            return m.done(ud, d)
        return all(self._foreach_proxy_method(ud, _handle))
314