# /openbmc/openbmc/poky/bitbake/lib/bb/fetch2/git.py (revision eb8dc403)
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6git fetcher support the SRC_URI with format of:
7SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
8
9Supported SRC_URI options are:
10
11- branch
12   The git branch to retrieve from. The default is "master"
13
14   This option also supports multiple branch fetching, with branches
15   separated by commas.  In multiple branches case, the name option
16   must have the same number of names to match the branches, which is
17   used to specify the SRC_REV for the branch
18   e.g:
19   SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
20   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
21   SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
22
23- tag
24    The git tag to retrieve. The default is "master"
25
26- protocol
27   The method to use to access the repository. Common options are "git",
28   "http", "https", "file", "ssh" and "rsync". The default is "git".
29
30- rebaseable
31   rebaseable indicates that the upstream git repo may rebase in the future,
32   and current revision may disappear from upstream repo. This option will
33   remind fetcher to preserve local cache carefully for future use.
34   The default value is "0", set rebaseable=1 for rebaseable git repo.
35
36- nocheckout
37   Don't checkout source code when unpacking. set this option for the recipe
38   who has its own routine to checkout code.
39   The default is "0", set nocheckout=1 if needed.
40
41- bareclone
42   Create a bare clone of the source code and don't checkout the source code
43   when unpacking. Set this option for the recipe who has its own routine to
44   checkout code and tracking branch requirements.
45   The default is "0", set bareclone=1 if needed.
46
47- nobranch
48   Don't check the SHA validation for branch. set this option for the recipe
49   referring to commit which is valid in tag instead of branch.
50   The default is "0", set nobranch=1 if needed.
51
52- usehead
53   For local git:// urls to use the current branch HEAD as the revision for use with
54   AUTOREV. Implies nobranch.
55
56"""
57
58#Copyright (C) 2005 Richard Purdie
59#
60# This program is free software; you can redistribute it and/or modify
61# it under the terms of the GNU General Public License version 2 as
62# published by the Free Software Foundation.
63#
64# This program is distributed in the hope that it will be useful,
65# but WITHOUT ANY WARRANTY; without even the implied warranty of
66# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
67# GNU General Public License for more details.
68#
69# You should have received a copy of the GNU General Public License along
70# with this program; if not, write to the Free Software Foundation, Inc.,
71# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
72
73import collections
74import errno
75import fnmatch
76import os
77import re
78import subprocess
79import tempfile
80import bb
81import bb.progress
82from   bb.fetch2 import FetchMethod
83from   bb.fetch2 import runfetchcmd
84from   bb.fetch2 import logger
85
86
87class GitProgressHandler(bb.progress.LineFilterProgressHandler):
88    """Extract progress information from git output"""
89    def __init__(self, d):
90        self._buffer = ''
91        self._count = 0
92        super(GitProgressHandler, self).__init__(d)
93        # Send an initial progress event so the bar gets shown
94        self._fire_progress(-1)
95
96    def write(self, string):
97        self._buffer += string
98        stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
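        # Each stage's share of the overall progress bar; the weights sum to
        # 1.0, and the cumulative weight of the earlier stages is added to the
        # current stage's scaled percentage below.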
        stage_weights = [0.2, 0.05, 0.5, 0.25]
        stagenum = 0
        for i, stage in reversed(list(enumerate(stages))):
            if stage in self._buffer:
                stagenum = i
                self._buffer = ''
                break
        self._status = stages[stagenum]
        percs = re.findall(r'(\d+)%', string)
        if percs:
            progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
            rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
            if rates:
                rate = rates[-1]
            else:
                rate = None
            self.update(progress, rate)
        else:
            if stagenum == 0:
                percs = re.findall(r': (\d+)', string)
                if percs:
                    count = int(percs[-1])
                    if count > self._count:
                        self._count = count
                        self._fire_progress(-count)
        super(GitProgressHandler, self).write(string)


class Git(FetchMethod):
    """Class to fetch a module or modules from git repositories"""
    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')

    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def supports_checksum(self, urldata):
        return False

    def urldata_init(self, ud, d):
        """
        Initialise git-specific variables within url data so that methods
        like latest_revision() can work.
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if ud.proto not in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"

        ud.nobranch = ud.parm.get("nobranch","0") == "1"

        # usehead implies nobranch
        ud.usehead = ud.parm.get("usehead","0") == "1"
        if ud.usehead:
            if ud.proto != "file":
                raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
            ud.nobranch = 1

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone","0") == "1"
        if ud.bareclone:
            ud.nocheckout = 1

        ud.unresolvedrev = {}
        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)

        ud.cloneflags = "-s -n"
        if ud.bareclone:
            ud.cloneflags += " --mirror"

        ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
        ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()

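        # BB_GIT_SHALLOW_DEPTH (optionally overridden per name below) must be
        # a non-negative integer; it defaults to 1 when unset.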
        depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
        if depth_default is not None:
            try:
                depth_default = int(depth_default or 0)
            except ValueError:
                raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
            else:
                if depth_default < 0:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
        else:
            depth_default = 1
        ud.shallow_depths = collections.defaultdict(lambda: depth_default)

        revs_default = d.getVar("BB_GIT_SHALLOW_REVS", True)
        ud.shallow_revs = []
        ud.branches = {}
        for pos, name in enumerate(ud.names):
            branch = branches[pos]
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

            shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name)
            if shallow_depth is not None:
                try:
                    shallow_depth = int(shallow_depth or 0)
                except ValueError:
                    raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                else:
                    if shallow_depth < 0:
                        raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
                    ud.shallow_depths[name] = shallow_depth

            revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name)
            if revs is not None:
                ud.shallow_revs.extend(revs.split())
            elif revs_default is not None:
                ud.shallow_revs.extend(revs_default.split())

        if (ud.shallow and
                not ud.shallow_revs and
                all(ud.shallow_depths[n] == 0 for n in ud.names)):
            # Shallow disabled for this URL
            ud.shallow = False

        if ud.usehead:
            ud.unresolvedrev['default'] = 'HEAD'

        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"

        write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
        ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
        ud.write_shallow_tarballs = (d.getVar("BB_GENERATE_SHALLOW_TARBALLS") or write_tarballs) != "0"

        ud.setup_revisions(d)

        for name in ud.names:
            # Ensure anything that doesn't look like a sha1 revision is translated into one
            if not ud.revisions[name] or len(ud.revisions[name]) != 40 or any(c not in "abcdef0123456789" for c in ud.revisions[name]):
                if ud.revisions[name]:
                    ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        # For a rebaseable git repo, it is necessary to keep a mirror tarball
        # per revision, so that even if the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contain the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]

        dl_dir = d.getVar("DL_DIR")
        gitdir = d.getVar("GITDIR") or (dl_dir + "/git2/")
        ud.clonedir = os.path.join(gitdir, gitsrcname)
        ud.localfile = ud.clonedir

        mirrortarball = 'git2_%s.tar.gz' % gitsrcname
        ud.fullmirror = os.path.join(dl_dir, mirrortarball)
        ud.mirrortarballs = [mirrortarball]
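        # The shallow tarball name encodes everything that can change its
        # contents: bareness, any explicit shallow revisions, each name's
        # revision and depth, and the set of refs that are kept, so different
        # shallow configurations never share a tarball.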
        if ud.shallow:
            tarballname = gitsrcname
            if ud.bareclone:
                tarballname = "%s_bare" % tarballname

            if ud.shallow_revs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))

            for name, revision in sorted(ud.revisions.items()):
                tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7])
                depth = ud.shallow_depths[name]
                if depth:
                    tarballname = "%s-%s" % (tarballname, depth)

            shallow_refs = []
            if not ud.nobranch:
                shallow_refs.extend(ud.branches.values())
            if ud.shallow_extra_refs:
                shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
            if shallow_refs:
                tarballname = "%s_%s" % (tarballname, "_".join(sorted(shallow_refs)).replace('/', '.'))

            fetcher = self.__class__.__name__.lower()
            ud.shallowtarball = '%sshallow_%s.tar.gz' % (fetcher, tarballname)
            ud.fullshallow = os.path.join(dl_dir, ud.shallowtarball)
            ud.mirrortarballs.insert(0, ud.shallowtarball)

    def localpath(self, ud, d):
        return ud.clonedir

    def need_update(self, ud, d):
        if not os.path.exists(ud.clonedir):
            return True
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                return True
        if ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow):
            return True
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
            return True
        if os.path.exists(ud.clonedir):
            return False
        return True

    def download(self, ud, d):
        """Fetch url"""

        no_clone = not os.path.exists(ud.clonedir)
        need_update = no_clone or self.need_update(ud, d)

        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and need_update:
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and no_clone:
            bb.utils.mkdirhier(ud.clonedir)
            runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fall back to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

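            # Re-add origin as a fetch mirror and fetch every ref verbatim
            # (refs/*:refs/*) so the bare cache mirrors the upstream repository.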
362            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
363            fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
364            if ud.proto.lower() != 'file':
365                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
366            progresshandler = GitProgressHandler(d)
367            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
368            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
369            runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
370            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
371            try:
372                os.unlink(ud.fullmirror)
373            except OSError as exc:
374                if exc.errno != errno.ENOENT:
375                    raise
376        for name in ud.names:
377            if not self._contains_ref(ud, d, name, ud.clonedir):
378                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
379
380    def build_mirror_data(self, ud, d):
381        if ud.shallow and ud.write_shallow_tarballs:
382            if not os.path.exists(ud.fullshallow):
383                if os.path.islink(ud.fullshallow):
384                    os.unlink(ud.fullshallow)
385                tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
386                shallowclone = os.path.join(tempdir, 'git')
387                try:
388                    self.clone_shallow_local(ud, shallowclone, d)
389
390                    logger.info("Creating tarball of git repository")
391                    runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
392                    runfetchcmd("touch %s.done" % ud.fullshallow, d)
393                finally:
394                    bb.utils.remove(tempdir, recurse=True)
395        elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
396            if os.path.islink(ud.fullmirror):
397                os.unlink(ud.fullmirror)
398
399            logger.info("Creating tarball of git repository")
400            runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
401            runfetchcmd("touch %s.done" % ud.fullmirror, d)
402
403    def clone_shallow_local(self, ud, dest, d):
404        """Clone the repo and make it shallow.
405
406        The upstream url of the new clone isn't set at this time, as it'll be
407        set correctly when unpacked."""
408        runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)
409
410        to_parse, shallow_branches = [], []
411        for name in ud.names:
412            revision = ud.revisions[name]
413            depth = ud.shallow_depths[name]
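            # A depth of N keeps N commits, so the oldest commit to keep is
            # revision~(N-1); the trailing ^{} dereferences tags to the
            # underlying commit for the rev-parse below.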
            if depth:
                to_parse.append('%s~%d^{}' % (revision, depth - 1))

            # For nobranch, we need a ref, otherwise the commits will be
            # removed, and for non-nobranch, we truncate the branch to our
            # srcrev, to avoid keeping unnecessary history beyond that.
            branch = ud.branches[name]
            if ud.nobranch:
                ref = "refs/shallow/%s" % name
            elif ud.bareclone:
                ref = "refs/heads/%s" % branch
            else:
                ref = "refs/remotes/origin/%s" % branch

            shallow_branches.append(ref)
            runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

        # Map srcrev+depths to revisions
        parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)

        # Resolve specified revisions
        parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
        shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()

        # Apply extra ref wildcards
        all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
                               d, workdir=dest).splitlines()
        for r in ud.shallow_extra_refs:
            if not ud.bareclone:
                r = r.replace('refs/heads/', 'refs/remotes/origin/')

            if '*' in r:
                matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
                shallow_branches.extend(matches)
            else:
                shallow_branches.append(r)

        # Make the repository shallow
        shallow_cmd = [self.make_shallow_path, '-s']
        for b in shallow_branches:
            shallow_cmd.append('-r')
            shallow_cmd.append(b)
        shallow_cmd.extend(shallow_revisions)
        runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)

    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % subdir
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        if ud.shallow and (not os.path.exists(ud.clonedir) or self.need_update(ud, d)):
            bb.utils.mkdirhier(destdir)
            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
        else:
            runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)

        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
                            workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)

        return True

    def clean(self, ud, d):
        """ clean the git directory """

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")

    def supports_srcrev(self):
        return True

    def _contains_ref(self, ud, d, name, wd):
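        # Count how many refs contain the revision: with nobranch we only
        # check that the commit object exists, otherwise we check that the
        # configured branch actually contains it. The shell pipeline prints a
        # single count, so "0" means the revision is missing.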
510        cmd = ""
511        if ud.nobranch:
512            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
513                ud.basecmd, ud.revisions[name])
514        else:
515            cmd =  "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
516                ud.basecmd, ud.revisions[name], ud.branches[name])
517        try:
518            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
519        except bb.fetch2.FetchError:
520            return False
521        if len(output.split()) > 1:
522            raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
523        return output.split()[0] != "0"
524
525    def _get_repo_url(self, ud):
526        """
527        Return the repository URL
528        """
529        if ud.user:
530            username = ud.user + '@'
531        else:
532            username = ""
533        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
534
535    def _revision_key(self, ud, d, name):
536        """
537        Return a unique key for the url
538        """
539        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
540
541    def _lsremote(self, ud, d, search):
542        """
543        Run git ls-remote with the specified search string
544        """
545        # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
546        # and WORKDIR is in PATH (as a result of RSS), our call to
547        # runfetchcmd() exports PATH so this function will get called again (!)
548        # In this scenario the return call of the function isn't actually
549        # important - WORKDIR isn't needed in PATH to call git ls-remote
550        # anyway.
551        if d.getVar('_BB_GIT_IN_LSREMOTE', False):
552            return ''
553        d.setVar('_BB_GIT_IN_LSREMOTE', '1')
554        try:
555            repourl = self._get_repo_url(ud)
556            cmd = "%s ls-remote %s %s" % \
557                (ud.basecmd, repourl, search)
558            if ud.proto.lower() != 'file':
559                bb.fetch2.check_network_access(d, cmd, repourl)
560            output = runfetchcmd(cmd, d, True)
561            if not output:
562                raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
563        finally:
564            d.delVar('_BB_GIT_IN_LSREMOTE')
565        return output
566
567    def _latest_revision(self, ud, d, name):
568        """
569        Compute the HEAD revision for the url
570        """
571        output = self._lsremote(ud, d, "")
572        # Tags of the form ^{} may not work, need to fallback to other form
573        if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
574            head = ud.unresolvedrev[name]
575            tag = ud.unresolvedrev[name]
576        else:
577            head = "refs/heads/%s" % ud.unresolvedrev[name]
578            tag = "refs/tags/%s" % ud.unresolvedrev[name]
579        for s in [head, tag + "^{}", tag]:
580            for l in output.strip().split('\n'):
581                sha1, ref = l.split()
582                if s == ref:
583                    return sha1
584        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
585            (ud.unresolvedrev[name], ud.host+ud.path))
586
587    def latest_versionstring(self, ud, d):
588        """
589        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
590        by searching through the tags output of ls-remote, comparing
591        versions and returning the highest match.
592        """
593        pupver = ('', '')
594
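        # UPSTREAM_CHECK_GITTAGREGEX overrides the default pattern, which
        # matches dotted or underscore-separated numeric versions in tag names.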
        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
            bb.note("Could not list remote: %s" % str(e))
            return pupver

        verstring = ""
        revision = ""
        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            # Ignore non-released tags
            m = re.search("(alpha|beta|rc|final)+", tag_head)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            if tag is None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % quote(rev),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    with open(rev_file, "w") as f:
                        f.write("%d\n" % int(commits))
            else:
                with open(rev_file, "r") as f:
                    commits = f.readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)

    def checkstatus(self, fetch, ud, d):
        try:
            self._lsremote(ud, d, "")
            return True
        except bb.fetch2.FetchError:
            return False