1"""
2BitBake 'Fetch' git implementation
3
4git fetcher support the SRC_URI with format of:
5SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
6
7Supported SRC_URI options are:
8
9- branch
10   The git branch to retrieve from. The default is "master"
11
12   This option also supports multiple branch fetching, with branches
13   separated by commas.  In multiple branches case, the name option
14   must have the same number of names to match the branches, which is
15   used to specify the SRC_REV for the branch
16   e.g:
17   SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
18   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
19   SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
20
21- tag
22    The git tag to retrieve. The default is "master"
23
24- protocol
25   The method to use to access the repository. Common options are "git",
26   "http", "https", "file", "ssh" and "rsync". The default is "git".
27
28- rebaseable
29   rebaseable indicates that the upstream git repo may rebase in the future,
30   and current revision may disappear from upstream repo. This option will
31   remind fetcher to preserve local cache carefully for future use.
32   The default value is "0", set rebaseable=1 for rebaseable git repo.
33
34- nocheckout
35   Don't checkout source code when unpacking. set this option for the recipe
36   who has its own routine to checkout code.
37   The default is "0", set nocheckout=1 if needed.
38
39- bareclone
40   Create a bare clone of the source code and don't checkout the source code
41   when unpacking. Set this option for the recipe who has its own routine to
42   checkout code and tracking branch requirements.
43   The default is "0", set bareclone=1 if needed.
44
45- nobranch
46   Don't check the SHA validation for branch. set this option for the recipe
47   referring to commit which is valid in tag instead of branch.
48   The default is "0", set nobranch=1 if needed.
49
50- usehead
51   For local git:// urls to use the current branch HEAD as the revision for use with
52   AUTOREV. Implies nobranch.
53
54"""

# Copyright (C) 2005 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

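# Illustrative configuration sketch: the shallow-clone support below is driven
# by datastore variables read in urldata_init(). Assuming a recipe whose
# SRC_URI uses name=nameX (an assumed name, purely for illustration), a
# minimal local.conf fragment might look like:
#
#   BB_GIT_SHALLOW = "1"
#   BB_GENERATE_SHALLOW_TARBALLS = "1"
#   BB_GIT_SHALLOW_DEPTH = "1"
#   BB_GIT_SHALLOW_DEPTH_nameX = "10"
#
# A depth of 0 combined with no BB_GIT_SHALLOW_REVS disables shallow
# processing for that URL (see the check near the end of urldata_init()).
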
import collections
import errno
import fnmatch
import os
import re
import subprocess
import tempfile
import bb
import bb.progress
from   bb.fetch2 import FetchMethod
from   bb.fetch2 import runfetchcmd
from   bb.fetch2 import logger


class GitProgressHandler(bb.progress.LineFilterProgressHandler):
    """Extract progress information from git output"""
    def __init__(self, d):
        self._buffer = ''
        self._count = 0
        super(GitProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(-1)

    def write(self, string):
        self._buffer += string
        stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
        stage_weights = [0.2, 0.05, 0.5, 0.25]
        stagenum = 0
        for i, stage in reversed(list(enumerate(stages))):
            if stage in self._buffer:
                stagenum = i
                self._buffer = ''
                break
        self._status = stages[stagenum]
        percs = re.findall(r'(\d+)%', string)
        if percs:
            progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
            rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
            if rates:
                rate = rates[-1]
            else:
                rate = None
            self.update(progress, rate)
        else:
            if stagenum == 0:
                percs = re.findall(r': (\d+)', string)
                if percs:
                    count = int(percs[-1])
                    if count > self._count:
                        self._count = count
                        self._fire_progress(-count)
        super(GitProgressHandler, self).write(string)

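# Worked example (illustrative, with made-up numbers): git emits progress lines
# such as
#   "Receiving objects:  42% (4200/10000), 1.23 MiB | 2.50 MiB/s"
# For that line, stagenum is 2 ('Receiving objects'), so write() above reports
# round(42 * 0.5 + (0.2 + 0.05) * 100) = 46 as the overall progress, and the
# rate regex picks up "2.50 MiB/s".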

class Git(FetchMethod):
    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.join(os.path.abspath(__file__))), '..', '..', '..'))
    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')

    """Class to fetch a module or modules from git repositories"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def supports_checksum(self, urldata):
        return False

    def urldata_init(self, ud, d):
        """
        Initialize git-specific variables within the url data so that
        methods like latest_revision() can work.
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"

        ud.nobranch = ud.parm.get("nobranch","0") == "1"

        # usehead implies nobranch
        ud.usehead = ud.parm.get("usehead","0") == "1"
        if ud.usehead:
            if ud.proto != "file":
                raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
            ud.nobranch = 1

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone","0") == "1"
        if ud.bareclone:
            ud.nocheckout = 1

        ud.unresolvedrev = {}
        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)

        ud.cloneflags = "-s -n"
        if ud.bareclone:
            ud.cloneflags += " --mirror"

        ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
        ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()

        depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
        if depth_default is not None:
            try:
                depth_default = int(depth_default or 0)
            except ValueError:
                raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
            else:
                if depth_default < 0:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
        else:
            depth_default = 1
        ud.shallow_depths = collections.defaultdict(lambda: depth_default)

        revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
        ud.shallow_revs = []
        ud.branches = {}
        for pos, name in enumerate(ud.names):
            branch = branches[pos]
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

            shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name)
            if shallow_depth is not None:
                try:
                    shallow_depth = int(shallow_depth or 0)
                except ValueError:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                else:
                    if shallow_depth < 0:
                        raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                    ud.shallow_depths[name] = shallow_depth

            revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name)
            if revs is not None:
                ud.shallow_revs.extend(revs.split())
            elif revs_default is not None:
                ud.shallow_revs.extend(revs_default.split())

        if (ud.shallow and
                not ud.shallow_revs and
                all(ud.shallow_depths[n] == 0 for n in ud.names)):
            # Shallow disabled for this URL
            ud.shallow = False

        if ud.usehead:
            ud.unresolvedrev['default'] = 'HEAD'

        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"

        write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
        ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
        ud.write_shallow_tarballs = (d.getVar("BB_GENERATE_SHALLOW_TARBALLS") or write_tarballs) != "0"

        ud.setup_revisions(d)
        for name in ud.names:
            # Ensure anything that doesn't look like a SHA-1 checksum/revision is translated into one
            if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
                if ud.revisions[name]:
                    ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        # for rebaseable git repos, it is necessary to keep a mirror tarball
        # per revision, so that even if the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contain the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]

        dl_dir = d.getVar("DL_DIR")
        gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
        ud.clonedir = os.path.join(gitdir, gitsrcname)
        ud.localfile = ud.clonedir

        mirrortarball = 'git2_%s.tar.gz' % gitsrcname
        ud.fullmirror = os.path.join(dl_dir, mirrortarball)
        ud.mirrortarballs = [mirrortarball]
        if ud.shallow:
            tarballname = gitsrcname
            if ud.bareclone:
                tarballname = "%s_bare" % tarballname

            if ud.shallow_revs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))

            for name, revision in sorted(ud.revisions.items()):
                tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7])
                depth = ud.shallow_depths[name]
                if depth:
                    tarballname = "%s-%s" % (tarballname, depth)

            shallow_refs = []
            if not ud.nobranch:
                shallow_refs.extend(ud.branches.values())
            if ud.shallow_extra_refs:
                shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
            if shallow_refs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(shallow_refs)).replace('/', '.'))

            fetcher = self.__class__.__name__.lower()
            ud.shallowtarball = '%sshallow_%s.tar.gz' % (fetcher, tarballname)
            ud.fullshallow = os.path.join(dl_dir, ud.shallowtarball)
            ud.mirrortarballs.insert(0, ud.shallowtarball)

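    # Illustrative example (the hostname and path are assumptions used purely
    # for illustration): for a URL such as
    # "git://git.example.com/foo/bar.git;protocol=https;branch=master" the
    # naming above yields gitsrcname "git.example.com.foo.bar.git", a clone
    # directory of ${DL_DIR}/git2/git.example.com.foo.bar.git and a mirror
    # tarball ${DL_DIR}/git2_git.example.com.foo.bar.git.tar.gz.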
    def localpath(self, ud, d):
        return ud.clonedir

    def need_update(self, ud, d):
        return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud)

    def clonedir_need_update(self, ud, d):
        if not os.path.exists(ud.clonedir):
            return True
        if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d):
            return True
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                return True
        return False

    def clonedir_need_shallow_revs(self, ud, d):
        for rev in ud.shallow_revs:
            try:
                runfetchcmd('%s rev-parse -q --verify %s' % (ud.basecmd, rev), d, quiet=True, workdir=ud.clonedir)
            except bb.fetch2.FetchError:
                return rev
        return None

    def shallow_tarball_need_update(self, ud):
        return ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow)

    def tarball_need_update(self, ud):
        return ud.write_tarballs and not os.path.exists(ud.fullmirror)

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
            return True
        if os.path.exists(ud.clonedir):
            return False
        return True

    def download(self, ud, d):
        """Fetch url"""

        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
            bb.utils.mkdirhier(ud.clonedir)
            runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        if self.clonedir_need_update(ud, d):
            output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
            if "origin" in output:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
            fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))

        if ud.shallow and ud.write_shallow_tarballs:
            missing_rev = self.clonedir_need_shallow_revs(ud, d)
            if missing_rev:
                raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)

    def build_mirror_data(self, ud, d):
        if ud.shallow and ud.write_shallow_tarballs:
            if not os.path.exists(ud.fullshallow):
                if os.path.islink(ud.fullshallow):
                    os.unlink(ud.fullshallow)
                tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
                shallowclone = os.path.join(tempdir, 'git')
                try:
                    self.clone_shallow_local(ud, shallowclone, d)

                    logger.info("Creating tarball of git repository")
                    runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
                    runfetchcmd("touch %s.done" % ud.fullshallow, d)
                finally:
                    bb.utils.remove(tempdir, recurse=True)
        elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
            runfetchcmd("touch %s.done" % ud.fullmirror, d)

    def clone_shallow_local(self, ud, dest, d):
        """Clone the repo and make it shallow.

        The upstream url of the new clone isn't set at this time, as it'll be
        set correctly when unpacked."""
        runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)

        to_parse, shallow_branches = [], []
        for name in ud.names:
            revision = ud.revisions[name]
            depth = ud.shallow_depths[name]
            if depth:
                to_parse.append('%s~%d^{}' % (revision, depth - 1))

            # For nobranch, we need a ref, otherwise the commits will be
            # removed, and for non-nobranch, we truncate the branch to our
            # srcrev, to avoid keeping unnecessary history beyond that.
            branch = ud.branches[name]
            if ud.nobranch:
                ref = "refs/shallow/%s" % name
            elif ud.bareclone:
                ref = "refs/heads/%s" % branch
            else:
                ref = "refs/remotes/origin/%s" % branch

            shallow_branches.append(ref)
            runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

        # Map srcrev+depths to revisions
        parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)

        # Resolve specified revisions
        parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
        shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()

        # Apply extra ref wildcards
        all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
                               d, workdir=dest).splitlines()
        for r in ud.shallow_extra_refs:
            if not ud.bareclone:
                r = r.replace('refs/heads/', 'refs/remotes/origin/')

            if '*' in r:
                matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
                shallow_branches.extend(matches)
            else:
                shallow_branches.append(r)

        # Make the repository shallow
        shallow_cmd = [self.make_shallow_path, '-s']
        for b in shallow_branches:
            shallow_cmd.append('-r')
            shallow_cmd.append(b)
        shallow_cmd.extend(shallow_revisions)
        runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)

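    # Illustrative example (hypothetical values): with a single name whose
    # branch is "master", a depth of 1 and no extra refs, clone_shallow_local()
    # above ends up running roughly:
    #   <bitbake_dir>/bin/git-make-shallow -s -r refs/remotes/origin/master <resolved-srcrev-sha1>
    # i.e. the history is truncated to the resolved "<srcrev>~0^{}" commit
    # while the listed refs are kept.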
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % subdir
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        need_lfs = ud.parm.get("lfs", "1") == "1"

        source_found = False
        source_error = []

        if not source_found:
            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
            if clonedir_is_up_to_date:
                runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
                source_found = True
            else:
                source_error.append("clone directory not available or not up to date: " + ud.clonedir)

        if not source_found:
            if ud.shallow:
                if os.path.exists(ud.fullshallow):
                    bb.utils.mkdirhier(destdir)
                    runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
                    source_found = True
                else:
                    source_error.append("shallow clone not available: " + ud.fullshallow)
            else:
                source_error.append("shallow clone not enabled")

        if not source_found:
            raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)

        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)

        if self._contains_lfs(ud, d, destdir):
            if need_lfs and not self._find_git_lfs(d):
                raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
            else:
                bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))

        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
                            workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)

        return True

    def clean(self, ud, d):
        """ clean the git directory """

        to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
        # The localpath is a symlink to clonedir when it is cloned from a
        # mirror, so remove both of them.
        if os.path.islink(ud.localpath):
            clonedir = os.path.realpath(ud.localpath)
            to_remove.append(clonedir)

        for r in to_remove:
            if os.path.exists(r):
                bb.note('Removing %s' % r)
                bb.utils.remove(r, True)

    def supports_srcrev(self):
        return True

    def _contains_ref(self, ud, d, name, wd):
        cmd = ""
        if ud.nobranch:
            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name])
        else:
            cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name], ud.branches[name])
        try:
            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
        except bb.fetch2.FetchError:
            return False
        if len(output.split()) > 1:
            raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _contains_lfs(self, ud, d, wd):
        """
        Check if the repository has 'lfs' (large file) content
        """
        cmd = "%s grep lfs HEAD:.gitattributes | wc -l" % (
                ud.basecmd)
        try:
            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
            if int(output) > 0:
                return True
        except (bb.fetch2.FetchError, ValueError):
            pass
        return False

    def _find_git_lfs(self, d):
        """
        Return True if git-lfs can be found, False otherwise.
        """
        import shutil
        return shutil.which("git-lfs", path=d.getVar('PATH')) is not None

    def _get_repo_url(self, ud):
        """
        Return the repository URL
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""
        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]

    def _lsremote(self, ud, d, search):
        """
        Run git ls-remote with the specified search string
        """
        # Prevent recursion, e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
        # and WORKDIR is in PATH (as a result of RSS), our call to
        # runfetchcmd() exports PATH so this function will get called again (!)
        # In this scenario the return value of the function isn't actually
        # important - WORKDIR isn't needed in PATH to call git ls-remote
        # anyway.
        if d.getVar('_BB_GIT_IN_LSREMOTE', False):
            return ''
        d.setVar('_BB_GIT_IN_LSREMOTE', '1')
        try:
            repourl = self._get_repo_url(ud)
            cmd = "%s ls-remote %s %s" % \
                (ud.basecmd, repourl, search)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, cmd, repourl)
            output = runfetchcmd(cmd, d, True)
            if not output:
                raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
        finally:
            d.delVar('_BB_GIT_IN_LSREMOTE')
        return output

    def _latest_revision(self, ud, d, name):
        """
        Compute the HEAD revision for the url
        """
        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fallback to other form
        if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
            head = ud.unresolvedrev[name]
            tag = ud.unresolvedrev[name]
        else:
            head = "refs/heads/%s" % ud.unresolvedrev[name]
            tag = "refs/tags/%s" % ud.unresolvedrev[name]
        for s in [head, tag + "^{}", tag]:
            for l in output.strip().split('\n'):
                sha1, ref = l.split()
                if s == ref:
                    return sha1
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
            (ud.unresolvedrev[name], ud.host+ud.path))

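    # Illustrative example (hypothetical output): when SRCREV does not resolve
    # to a full sha1 (e.g. AUTOREV) and the branch is "master",
    # _latest_revision() scans ls-remote lines such as
    #   3f786850e387550fdab836ed7e6dc881de23001b   refs/heads/master
    #   89e6c98d92887913cadf06b2adb97f26cde4849b   refs/tags/v1.0^{}
    # and returns the sha1 whose ref matches "refs/heads/master" first, falling
    # back to "refs/tags/master^{}" and then "refs/tags/master".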
    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        pupver = ('', '')

        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
            bb.note("Could not list remote: %s" % str(e))
            return pupver

        verstring = ""
        revision = ""
        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search(r"(alpha|beta|rc|final)+", tag_head)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            if tag is None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver


    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % quote(rev),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)

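    # Illustrative example (hypothetical values): with 1234 commits reachable
    # from revision "3f786850...", gitpkgv_revision() above returns
    # (False, "1234+3f78685"), giving a sortable version component.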
    def checkstatus(self, fetch, ud, d):
        try:
            self._lsremote(ud, d, "")
            return True
        except bb.fetch2.FetchError:
            return False
