xref: /openbmc/openbmc/poky/bitbake/lib/bb/fetch2/git.py (revision 15ae2509)
1"""
2BitBake 'Fetch' git implementation
3
The git fetcher supports SRC_URI with the format of:
5SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
6
7Supported SRC_URI options are:
8
9- branch
10   The git branch to retrieve from. The default is "master"
11
12   This option also supports multiple branch fetching, with branches
13   separated by commas.  In multiple branches case, the name option
14   must have the same number of names to match the branches, which is
15   used to specify the SRC_REV for the branch
16   e.g:
17   SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
18   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
19   SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
20
21- tag
22    The git tag to retrieve. The default is "master"
23
24- protocol
25   The method to use to access the repository. Common options are "git",
26   "http", "https", "file", "ssh" and "rsync". The default is "git".
27
28- rebaseable
29   rebaseable indicates that the upstream git repo may rebase in the future,
30   and current revision may disappear from upstream repo. This option will
31   remind fetcher to preserve local cache carefully for future use.
32   The default value is "0", set rebaseable=1 for rebaseable git repo.
33
34- nocheckout
   Don't checkout source code when unpacking. Set this option for recipes
   that have their own routine to check out code.
   The default is "0", set nocheckout=1 if needed.
38
39- bareclone
40   Create a bare clone of the source code and don't checkout the source code
41   when unpacking. Set this option for the recipe who has its own routine to
42   checkout code and tracking branch requirements.
43   The default is "0", set bareclone=1 if needed.
44
45- nobranch
   Skip SHA validation against the branch. Set this option for recipes
   that refer to a commit which is valid in a tag instead of a branch.
   The default is "0", set nobranch=1 if needed.
49
50- usehead
51   For local git:// urls to use the current branch HEAD as the revision for use with
52   AUTOREV. Implies nobranch.
53
54"""
55
56# Copyright (C) 2005 Richard Purdie
57#
58# SPDX-License-Identifier: GPL-2.0-only
59#
60
61import collections
62import errno
63import fnmatch
64import os
65import re
66import subprocess
67import tempfile
68import bb
69import bb.progress
70from   bb.fetch2 import FetchMethod
71from   bb.fetch2 import runfetchcmd
72from   bb.fetch2 import logger
73
74
class GitProgressHandler(bb.progress.LineFilterProgressHandler):
    """Parse git's textual progress output and emit progress events."""

    # Stages git reports during a fetch/clone, in order of appearance,
    # and the fraction of overall progress attributed to each.
    _stages = ('Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas')
    _stage_weights = (0.2, 0.05, 0.5, 0.25)

    def __init__(self, d):
        self._buffer = ''
        self._count = 0
        super().__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(-1)

    def write(self, string):
        self._buffer += string
        # Find the most advanced stage whose marker has appeared in the
        # accumulated buffer; reset the buffer once a marker is seen.
        stagenum = 0
        for idx in range(len(self._stages) - 1, -1, -1):
            if self._stages[idx] in self._buffer:
                stagenum = idx
                self._buffer = ''
                break
        self._status = self._stages[stagenum]
        percentages = re.findall(r'(\d+)%', string)
        if percentages:
            # Overall progress = weight completed by earlier stages plus the
            # weighted percentage of the current stage.
            overall = int(round((int(percentages[-1]) * self._stage_weights[stagenum])
                                + (sum(self._stage_weights[:stagenum]) * 100)))
            rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
            rate = rates[-1] if rates else None
            self.update(overall, rate)
        elif stagenum == 0:
            # No percentages yet: during the initial counting phase report
            # raw object counts as negative (indeterminate) progress.
            counts = re.findall(r': (\d+)', string)
            if counts:
                count = int(counts[-1])
                if count > self._count:
                    self._count = count
                    self._fire_progress(-count)
        super().write(string)
113
114
class Git(FetchMethod):
    """Class to fetch a module or modules from git repositories"""

    # Root of the bitbake tree; this file lives in lib/bb/fetch2/, so go
    # three directory levels up from it.
    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
    # Helper script shipped with bitbake that turns a clone into a shallow one.
    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')

    def init(self, d):
        """Initialization hook; the git fetcher needs no per-datastore setup."""
        pass
122
123    def supports(self, ud, d):
124        """
125        Check to see if a given url can be fetched with git.
126        """
127        return ud.type in ['git']
128
    def supports_checksum(self, urldata):
        # Git sources are pinned by revision (SRCREV), not by file checksums.
        return False
131
    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work
        """
        # Protocol selection: explicit parameter wins; a URL with no host is
        # assumed to be a local repository.
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        # Boolean-ish URL options (see the module docstring for semantics).
        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"

        ud.nobranch = ud.parm.get("nobranch","0") == "1"

        # usehead implies nobranch
        ud.usehead = ud.parm.get("usehead","0") == "1"
        if ud.usehead:
            if ud.proto != "file":
                 raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
            ud.nobranch = 1

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone","0") == "1"
        if ud.bareclone:
            ud.nocheckout = 1

        # Multiple branches may be fetched; each name must have a matching
        # branch entry (comma-separated, parallel lists).
        ud.unresolvedrev = {}
        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)

        ud.cloneflags = "-s -n"
        if ud.bareclone:
            ud.cloneflags += " --mirror"

        # Shallow-clone configuration (BB_GIT_SHALLOW* variables).
        ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
        ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()

        depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
        if depth_default is not None:
            try:
                depth_default = int(depth_default or 0)
            except ValueError:
                raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
            else:
                if depth_default < 0:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
        else:
            depth_default = 1
        # Per-name depths fall back to the global default unless overridden below.
        ud.shallow_depths = collections.defaultdict(lambda: depth_default)

        revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
        ud.shallow_revs = []
        ud.branches = {}
        for pos, name in enumerate(ud.names):
            branch = branches[pos]
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

            # Per-name shallow depth override: BB_GIT_SHALLOW_DEPTH_<name>.
            shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name)
            if shallow_depth is not None:
                try:
                    shallow_depth = int(shallow_depth or 0)
                except ValueError:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                else:
                    if shallow_depth < 0:
                        raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                    ud.shallow_depths[name] = shallow_depth

            # Per-name shallow revisions override the global list.
            revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name)
            if revs is not None:
                ud.shallow_revs.extend(revs.split())
            elif revs_default is not None:
                ud.shallow_revs.extend(revs_default.split())

        if (ud.shallow and
                not ud.shallow_revs and
                all(ud.shallow_depths[n] == 0 for n in ud.names)):
            # Shallow disabled for this URL
            ud.shallow = False

        if ud.usehead:
            ud.unresolvedrev['default'] = 'HEAD'

        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"

        write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
        # Rebaseable repos always get a mirror tarball so that vanished
        # upstream revisions remain obtainable.
        ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
        ud.write_shallow_tarballs = (d.getVar("BB_GENERATE_SHALLOW_TARBALLS") or write_tarballs) != "0"

        ud.setup_revisions(d)

        for name in ud.names:
            # Ensure anything that doesn't look like a SHA-1 checksum/revision
            # (40 lowercase hex characters) is resolved to one via ls-remote.
            if not ud.revisions[name] or len(ud.revisions[name]) != 40  or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
                if ud.revisions[name]:
                    ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        # Build a filesystem-safe directory name from the host and path.
        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contains the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]

        dl_dir = d.getVar("DL_DIR")
        gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
        ud.clonedir = os.path.join(gitdir, gitsrcname)
        ud.localfile = ud.clonedir

        mirrortarball = 'git2_%s.tar.gz' % gitsrcname
        ud.fullmirror = os.path.join(dl_dir, mirrortarball)
        ud.mirrortarballs = [mirrortarball]
        if ud.shallow:
            # The shallow tarball name encodes everything that influences its
            # contents: bareness, shallow revs, per-name revisions and depths,
            # and the set of refs that were kept.
            tarballname = gitsrcname
            if ud.bareclone:
                tarballname = "%s_bare" % tarballname

            if ud.shallow_revs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))

            for name, revision in sorted(ud.revisions.items()):
                tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7])
                depth = ud.shallow_depths[name]
                if depth:
                    tarballname = "%s-%s" % (tarballname, depth)

            shallow_refs = []
            if not ud.nobranch:
                shallow_refs.extend(ud.branches.values())
            if ud.shallow_extra_refs:
                shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
            if shallow_refs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(shallow_refs)).replace('/', '.'))

            fetcher = self.__class__.__name__.lower()
            ud.shallowtarball = '%sshallow_%s.tar.gz' % (fetcher, tarballname)
            ud.fullshallow = os.path.join(dl_dir, ud.shallowtarball)
            # The shallow tarball is preferred, so it goes first.
            ud.mirrortarballs.insert(0, ud.shallowtarball)
285
    def localpath(self, ud, d):
        # The local artefact for a git url is its bare clone directory.
        return ud.clonedir
288
289    def need_update(self, ud, d):
290        return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud)
291
292    def clonedir_need_update(self, ud, d):
293        if not os.path.exists(ud.clonedir):
294            return True
295        for name in ud.names:
296            if not self._contains_ref(ud, d, name, ud.clonedir):
297                return True
298        return False
299
300    def shallow_tarball_need_update(self, ud):
301        return ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow)
302
303    def tarball_need_update(self, ud):
304        return ud.write_tarballs and not os.path.exists(ud.fullmirror)
305
306    def try_premirror(self, ud, d):
307        # If we don't do this, updating an existing checkout with only premirrors
308        # is not possible
309        if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
310            return True
311        if os.path.exists(ud.clonedir):
312            return False
313        return True
314
315    def download(self, ud, d):
316        """Fetch url"""
317
318        # A current clone is preferred to either tarball, a shallow tarball is
319        # preferred to an out of date clone, and a missing clone will use
320        # either tarball.
321        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
322            ud.localpath = ud.fullshallow
323            return
324        elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
325            bb.utils.mkdirhier(ud.clonedir)
326            runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
327
328        repourl = self._get_repo_url(ud)
329
330        # If the repo still doesn't exist, fallback to cloning it
331        if not os.path.exists(ud.clonedir):
332            # We do this since git will use a "-l" option automatically for local urls where possible
333            if repourl.startswith("file://"):
334                repourl = repourl[7:]
335            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
336            if ud.proto.lower() != 'file':
337                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
338            progresshandler = GitProgressHandler(d)
339            runfetchcmd(clone_cmd, d, log=progresshandler)
340
341        # Update the checkout if needed
342        needupdate = False
343        for name in ud.names:
344            if not self._contains_ref(ud, d, name, ud.clonedir):
345                needupdate = True
346                break
347
348        if needupdate:
349            output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
350            if "origin" in output:
351              runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
352
353            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
354            fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
355            if ud.proto.lower() != 'file':
356                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
357            progresshandler = GitProgressHandler(d)
358            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
359            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
360            runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
361            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
362            try:
363                os.unlink(ud.fullmirror)
364            except OSError as exc:
365                if exc.errno != errno.ENOENT:
366                    raise
367
368        for name in ud.names:
369            if not self._contains_ref(ud, d, name, ud.clonedir):
370                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
371
    def build_mirror_data(self, ud, d):
        """Create the mirror tarball(s) for the fetched repository.

        A shallow tarball is built from a temporary shallow clone; a full
        tarball is built directly from the bare clone directory.
        """
        if ud.shallow and ud.write_shallow_tarballs:
            if not os.path.exists(ud.fullshallow):
                # Remove a dangling symlink before writing the new tarball.
                if os.path.islink(ud.fullshallow):
                    os.unlink(ud.fullshallow)
                tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
                shallowclone = os.path.join(tempdir, 'git')
                try:
                    self.clone_shallow_local(ud, shallowclone, d)

                    logger.info("Creating tarball of git repository")
                    runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
                    runfetchcmd("touch %s.done" % ud.fullshallow, d)
                finally:
                    # Always clean up the temporary shallow clone.
                    bb.utils.remove(tempdir, recurse=True)
        elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
            runfetchcmd("touch %s.done" % ud.fullmirror, d)
394
    def clone_shallow_local(self, ud, dest, d):
        """Clone the repo and make it shallow.

        The upstream url of the new clone isn't set at this time, as it'll be
        set correctly when unpacked."""
        runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)

        to_parse, shallow_branches = [], []
        for name in ud.names:
            revision = ud.revisions[name]
            depth = ud.shallow_depths[name]
            if depth:
                # "<rev>~<depth-1>^{}" names the oldest commit to keep for
                # this name; it becomes a shallow boundary below.
                to_parse.append('%s~%d^{}' % (revision, depth - 1))

            # For nobranch, we need a ref, otherwise the commits will be
            # removed, and for non-nobranch, we truncate the branch to our
            # srcrev, to avoid keeping unnecessary history beyond that.
            branch = ud.branches[name]
            if ud.nobranch:
                ref = "refs/shallow/%s" % name
            elif ud.bareclone:
                ref = "refs/heads/%s" % branch
            else:
                ref = "refs/remotes/origin/%s" % branch

            shallow_branches.append(ref)
            runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

        # Map srcrev+depths to revisions
        parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)

        # Resolve specified revisions
        parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
        shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()

        # Apply extra ref wildcards
        all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
                               d, workdir=dest).splitlines()
        for r in ud.shallow_extra_refs:
            if not ud.bareclone:
                # Local branches only exist as remote-tracking refs in a
                # non-bare clone.
                r = r.replace('refs/heads/', 'refs/remotes/origin/')

            if '*' in r:
                matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
                shallow_branches.extend(matches)
            else:
                shallow_branches.append(r)

        # Make the repository shallow
        shallow_cmd = [self.make_shallow_path, '-s']
        for b in shallow_branches:
            shallow_cmd.append('-r')
            shallow_cmd.append(b)
        shallow_cmd.extend(shallow_revisions)
        runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
450
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        # subpath limits the checkout to a subdirectory of the repository.
        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % subdir
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        # Start from a clean destination directory.
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        source_found = False
        source_error = []

        # First choice: clone from the up-to-date local bare clone.
        if not source_found:
            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
            if clonedir_is_up_to_date:
                runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
                source_found = True
            else:
                source_error.append("clone directory not available or not up to date: " + ud.clonedir)

        # Fallback: extract the shallow mirror tarball, if enabled and present.
        if not source_found:
            if ud.shallow:
                if os.path.exists(ud.fullshallow):
                    bb.utils.mkdirhier(destdir)
                    runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
                    source_found = True
                else:
                    source_error.append("shallow clone not available: " + ud.fullshallow)
            else:
                source_error.append("shallow clone not enabled")

        if not source_found:
            raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)

        # Point origin at the real upstream url (the clone above used the
        # local cache as its origin).
        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)

        # Refuse to proceed if the repo needs git-lfs but the host lacks it.
        if self._contains_lfs(ud, d, destdir):
            path = d.getVar('PATH')
            if path:
                gitlfstool = bb.utils.which(path, "git-lfs", executable=True)
                if not gitlfstool:
                    raise bb.fetch2.FetchError("Repository %s has lfs content, install git-lfs plugin on host to download" % (repourl))
            else:
                bb.note("Could not find 'PATH'")


        if not ud.nocheckout:
            if subdir != "":
                # Check out only the requested subpath of the tree.
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
                            workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
            elif not ud.nobranch:
                # Create/reset the named branch at the pinned revision and
                # track the corresponding origin branch.
                branchname =  ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                # nobranch: detached checkout of the pinned revision.
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)

        return True
520
521    def clean(self, ud, d):
522        """ clean the git directory """
523
524        to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
525        # The localpath is a symlink to clonedir when it is cloned from a
526        # mirror, so remove both of them.
527        if os.path.islink(ud.localpath):
528            clonedir = os.path.realpath(ud.localpath)
529            to_remove.append(clonedir)
530
531        for r in to_remove:
532            if os.path.exists(r):
533                bb.note('Removing %s' % r)
534                bb.utils.remove(r, True)
535
    def supports_srcrev(self):
        # Git sources are addressed by SRCREV.
        return True
538
539    def _contains_ref(self, ud, d, name, wd):
540        cmd = ""
541        if ud.nobranch:
542            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
543                ud.basecmd, ud.revisions[name])
544        else:
545            cmd =  "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
546                ud.basecmd, ud.revisions[name], ud.branches[name])
547        try:
548            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
549        except bb.fetch2.FetchError:
550            return False
551        if len(output.split()) > 1:
552            raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
553        return output.split()[0] != "0"
554
555    def _contains_lfs(self, ud, d, wd):
556        """
557        Check if the repository has 'lfs' (large file) content
558        """
559        cmd = "%s grep lfs HEAD:.gitattributes | wc -l" % (
560                ud.basecmd)
561        try:
562            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
563            if int(output) > 0:
564                return True
565        except (bb.fetch2.FetchError,ValueError):
566            pass
567        return False
568
569    def _get_repo_url(self, ud):
570        """
571        Return the repository URL
572        """
573        if ud.user:
574            username = ud.user + '@'
575        else:
576            username = ""
577        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
578
579    def _revision_key(self, ud, d, name):
580        """
581        Return a unique key for the url
582        """
583        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
584
    def _lsremote(self, ud, d, search):
        """
        Run git ls-remote with the specified search string.

        Returns the raw command output; raises FetchError on empty output.
        """
        # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
        # and WORKDIR is in PATH (as a result of RSS), our call to
        # runfetchcmd() exports PATH so this function will get called again (!)
        # In this scenario the return call of the function isn't actually
        # important - WORKDIR isn't needed in PATH to call git ls-remote
        # anyway.
        if d.getVar('_BB_GIT_IN_LSREMOTE', False):
            return ''
        d.setVar('_BB_GIT_IN_LSREMOTE', '1')
        try:
            repourl = self._get_repo_url(ud)
            cmd = "%s ls-remote %s %s" % \
                (ud.basecmd, repourl, search)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, cmd, repourl)
            output = runfetchcmd(cmd, d, True)
            if not output:
                raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
        finally:
            # Always clear the recursion guard, even on failure.
            d.delVar('_BB_GIT_IN_LSREMOTE')
        return output
610
611    def _latest_revision(self, ud, d, name):
612        """
613        Compute the HEAD revision for the url
614        """
615        output = self._lsremote(ud, d, "")
616        # Tags of the form ^{} may not work, need to fallback to other form
617        if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
618            head = ud.unresolvedrev[name]
619            tag = ud.unresolvedrev[name]
620        else:
621            head = "refs/heads/%s" % ud.unresolvedrev[name]
622            tag = "refs/tags/%s" % ud.unresolvedrev[name]
623        for s in [head, tag + "^{}", tag]:
624            for l in output.strip().split('\n'):
625                sha1, ref = l.split()
626                if s == ref:
627                    return sha1
628        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
629            (ud.unresolvedrev[name], ud.host+ud.path))
630
631    def latest_versionstring(self, ud, d):
632        """
633        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
634        by searching through the tags output of ls-remote, comparing
635        versions and returning the highest match.
636        """
637        pupver = ('', '')
638
639        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
640        try:
641            output = self._lsremote(ud, d, "refs/tags/*")
642        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
643            bb.note("Could not list remote: %s" % str(e))
644            return pupver
645
646        verstring = ""
647        revision = ""
648        for line in output.split("\n"):
649            if not line:
650                break
651
652            tag_head = line.split("/")[-1]
653            # Ignore non-released branches
654            m = re.search(r"(alpha|beta|rc|final)+", tag_head)
655            if m:
656                continue
657
658            # search for version in the line
659            tag = tagregex.search(tag_head)
660            if tag == None:
661                continue
662
663            tag = tag.group('pver')
664            tag = tag.replace("_", ".")
665
666            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
667                continue
668
669            verstring = tag
670            revision = line.split()[0]
671            pupver = (verstring, revision)
672
673        return pupver
674
    def _build_revision(self, ud, d, name):
        # The build revision is the already-resolved SRCREV for this name.
        return ud.revisions[name]
677
678    def gitpkgv_revision(self, ud, d, name):
679        """
680        Return a sortable revision number by counting commits in the history
681        Based on gitpkgv.bblass in meta-openembedded
682        """
683        rev = self._build_revision(ud, d, name)
684        localpath = ud.localpath
685        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
686        if not os.path.exists(localpath):
687            commits = None
688        else:
689            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
690                from pipes import quote
691                commits = bb.fetch2.runfetchcmd(
692                        "git rev-list %s -- | wc -l" % quote(rev),
693                        d, quiet=True).strip().lstrip('0')
694                if commits:
695                    open(rev_file, "w").write("%d\n" % int(commits))
696            else:
697                commits = open(rev_file, "r").readline(128).strip()
698        if commits:
699            return False, "%s+%s" % (commits, rev[:7])
700        else:
701            return True, str(rev)
702
703    def checkstatus(self, fetch, ud, d):
704        try:
705            self._lsremote(ud, d, "")
706            return True
707        except bb.fetch2.FetchError:
708            return False
709