1"""
2BitBake 'Fetch' implementations
3
4Classes for obtaining upstream sources for the
5BitBake build tools.
6
7"""
8
9# Copyright (C) 2003, 2004  Chris Larson
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13# Based on functions from the base bb module, Copyright 2003 Holger Schurig
14
15import shlex
16import re
17import tempfile
18import os
19import errno
20import bb
21import bb.progress
22import socket
23import http.client
24import urllib.request, urllib.parse, urllib.error
25from   bb.fetch2 import FetchMethod
26from   bb.fetch2 import FetchError
27from   bb.fetch2 import logger
28from   bb.fetch2 import runfetchcmd
29from   bb.utils import export_proxies
30from   bs4 import BeautifulSoup
31from   bs4 import SoupStrainer
32
33class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
34    """
35    Extract progress information from wget output.
36    Note: relies on --progress=dot (with -v or without -q/-nv) being
37    specified on the wget command line.
38    """
39    def __init__(self, d):
40        super(WgetProgressHandler, self).__init__(d)
41        # Send an initial progress event so the bar gets shown
42        self._fire_progress(0)
43
44    def writeline(self, line):
45        percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
46        if percs:
47            progress = int(percs[-1][0])
48            rate = percs[-1][1] + '/s'
49            self.update(progress, rate)
50            return False
51        return True
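
    # An illustrative wget --progress=dot output line that the regex above
    # matches (hypothetical values):
    #   "  3100K .......... .......... ..........  61%  496K 5s"
    # yields progress=61 and rate="496K/s".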


class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""

    # CDNs like CloudFlare may do a 'browser integrity test' which can fail
    # with the standard wget/urllib User-Agent, so pretend to be a modern
    # browser.
    user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"

    def check_certs(self, d):
        """
        Should certificates be checked?
        """
        return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"
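
    # e.g. setting BB_CHECK_SSL_CERTS = "0" in the configuration disables
    # certificate verification; any other value, or leaving it unset, keeps
    # it enabled.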

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
        if 'protocol' in ud.parm:
            if ud.parm['protocol'] == 'git':
                raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
        if not ud.localfile:
            ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))

        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp"

        if not self.check_certs(d):
            self.basecmd += " --no-check-certificate"
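
    # An illustrative SRC_URI using the downloadfilename parameter handled
    # above (hypothetical URL):
    #   SRC_URI = "https://example.com/v1.0.tar.gz;downloadfilename=foo-1.0.tar.gz"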

    def _runwget(self, ud, d, command, quiet, workdir=None):

        progresshandler = WgetProgressHandler(d)

        logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command, ud.url)
        runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)

    def download(self, ud, d):
        """Fetch urls"""

        fetchcmd = self.basecmd

        if 'downloadfilename' in ud.parm:
            localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
            bb.utils.mkdirhier(os.path.dirname(localpath))
            fetchcmd += " -O %s" % shlex.quote(localpath)

        if ud.user and ud.pswd:
            fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)

        uri = ud.url.split(";")[0]
        if os.path.exists(ud.localpath):
            # the file exists, but we didn't complete it, so try to resume
            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
        else:
            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)

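        # At this point fetchcmd resembles (hypothetical URL, ${DL_DIR}
        # expanded by d.expand() above):
        #   /usr/bin/env wget -t 2 -T 30 --passive-ftp -P <DL_DIR> 'https://example.com/foo-1.0.tar.gz'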
        self._runwget(ud, d, fetchcmd, False)

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

        return True

    def checkstatus(self, fetch, ud, d, try_again=True):
        class HTTPConnectionCache(http.client.HTTPConnection):
            if fetch.connection_cache:
                def connect(self):
                    """Connect to the host and port specified in __init__."""

                    sock = fetch.connection_cache.get_connection(self.host, self.port)
                    if sock:
                        self.sock = sock
                    else:
                        self.sock = socket.create_connection((self.host, self.port),
                                    self.timeout, self.source_address)
                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)

                    if self._tunnel_host:
                        self._tunnel()

        class CacheHTTPHandler(urllib.request.HTTPHandler):
            def http_open(self, req):
                return self.do_open(HTTPConnectionCache, req)

            def do_open(self, http_class, req):
                """Return an addinfourl object for the request, using http_class.

                http_class must implement the HTTPConnection API from http.client.
                The addinfourl return value is a file-like object.  It also
                has methods and attributes including:
                    - info(): return the response headers (a Message-like object)
                    - geturl(): return the original request URL
                    - code: HTTP status code
                """
                host = req.host
                if not host:
                    raise urllib.error.URLError('no host given')

                h = http_class(host, timeout=req.timeout) # will parse host:port
                h.set_debuglevel(self._debuglevel)

                headers = dict(req.unredirected_hdrs)
                headers.update(dict((k, v) for k, v in list(req.headers.items())
                            if k not in headers))

                # We want to make an HTTP/1.1 request, but the addinfourl
                # class isn't prepared to deal with a persistent connection.
                # It will try to read all remaining data from the socket,
                # which will block while the server waits for the next request.
                # So make sure the connection gets closed after the (only)
                # request.

                # Don't close the connection when the connection cache is enabled.
                if fetch.connection_cache is None:
                    headers["Connection"] = "close"
                else:
                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0

                headers = dict(
                    (name.title(), val) for name, val in list(headers.items()))

                if req._tunnel_host:
                    tunnel_headers = {}
                    proxy_auth_hdr = "Proxy-Authorization"
                    if proxy_auth_hdr in headers:
                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                        # Proxy-Authorization should not be sent to origin
                        # server.
                        del headers[proxy_auth_hdr]
                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

                try:
                    h.request(req.get_method(), req.selector, req.data, headers)
                except socket.error as err: # XXX what error?
                    # Don't close the connection when the cache is enabled.
                    # Instead, try to detect connections that are no longer
                    # usable (for example, closed unexpectedly) and remove
                    # them from the cache.
                    if fetch.connection_cache is None:
                        h.close()
                    elif isinstance(err, OSError) and err.errno == errno.EBADF:
                        # This happens when the server closes the connection despite the Keep-Alive.
                        # Apparently urllib then uses the file descriptor, expecting it to be
                        # connected, when in reality the connection is already gone.
                        # We let the request fail and expect it to be
                        # tried once more ("try_again" in check_status()),
                        # with the dead connection removed from the cache.
                        # If it still fails, we give up, which can happen for bad
                        # HTTP proxy settings.
                        fetch.connection_cache.remove_connection(h.host, h.port)
                    raise urllib.error.URLError(err)
                else:
                    r = h.getresponse()

                # Pick apart the HTTPResponse object to get the addinfourl
                # object initialized properly.

                # Wrap the HTTPResponse object in socket's file object adapter
                # for Windows.  That adapter calls recv(), so delegate recv()
                # to read().  This weird wrapping allows the returned object to
                # have readline() and readlines() methods.

                # XXX It might be better to extract the read buffering code
                # out of socket._fileobject() and into a base class.
                r.recv = r.read

                # no data, just have to read
                r.read()
                class fp_dummy(object):
                    def read(self):
                        return ""
                    def readline(self):
                        return ""
                    def close(self):
                        pass
                    closed = False

                resp = urllib.response.addinfourl(fp_dummy(), r.msg, req.get_full_url())
                resp.code = r.status
                resp.msg = r.reason

                # Close the connection when the server requests it.
                if fetch.connection_cache is not None:
                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                        fetch.connection_cache.remove_connection(h.host, h.port)

                return resp

        class HTTPMethodFallback(urllib.request.BaseHandler):
            """
            Fall back to GET if HEAD is not allowed (405 HTTP error)
            """
            def http_error_405(self, req, fp, code, msg, headers):
                fp.read()
                fp.close()

                if req.get_method() != 'GET':
                    newheaders = dict((k, v) for k, v in list(req.headers.items())
                                      if k.lower() not in ("content-length", "content-type"))
                    return self.parent.open(urllib.request.Request(req.get_full_url(),
                                                            headers=newheaders,
                                                            origin_req_host=req.origin_req_host,
                                                            unverifiable=True))

                raise urllib.error.HTTPError(req, code, msg, headers, None)

            # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
            # Forbidden when they actually mean 405 Method Not Allowed.
            http_error_403 = http_error_405


        class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
            """
            urllib.request.HTTPRedirectHandler resets the method to GET on
            redirect, while we want to follow redirects using the original
            method.
            """
            def redirect_request(self, req, fp, code, msg, headers, newurl):
                newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
                newreq.get_method = req.get_method
                return newreq
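
            # e.g. a HEAD request that receives a 301 redirect is retried as
            # HEAD against the new URL instead of being downgraded to GET.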

        # We need to update the environment here as both the proxy and HTTPS
        # handlers need variables set. The proxy needs http_proxy and friends to
        # be set, and HTTPSHandler ends up calling into openssl to load the
        # certificates. In buildtools configurations this will be looking in the
        # wrong place for certificates by default: we set SSL_CERT_FILE to the
        # right location in the buildtools environment script but as BitBake
        # prunes the environment this is lost. When binaries are executed
        # runfetchcmd ensures these values are in the environment, but this is
        # pure Python so we need to update the environment.
        #
        # Avoid trampling the environment too much by using bb.utils.environment
        # to scope the changes to the build_opener request, which is when the
        # environment lookups happen.
        newenv = {}
        for name in bb.fetch2.FETCH_EXPORT_VARS:
            value = d.getVar(name)
            if not value:
                origenv = d.getVar("BB_ORIGENV")
                if origenv:
                    value = origenv.getVar(name)
            if value:
                newenv[name] = value

        with bb.utils.environment(**newenv):
            import ssl

            if self.check_certs(d):
                context = ssl.create_default_context()
            else:
                context = ssl._create_unverified_context()

            handlers = [FixedHTTPRedirectHandler,
                        HTTPMethodFallback,
                        urllib.request.ProxyHandler(),
                        CacheHTTPHandler(),
                        urllib.request.HTTPSHandler(context=context)]
            opener = urllib.request.build_opener(*handlers)

            try:
                uri = ud.url.split(";")[0]
                r = urllib.request.Request(uri)
                r.get_method = lambda: "HEAD"
                # Some servers (FusionForge, as used on Alioth) require that the
                # optional Accept header is set.
                r.add_header("Accept", "*/*")
                r.add_header("User-Agent", self.user_agent)
                def add_basic_auth(login_str, request):
                    '''Adds Basic auth to an http request; pass in login:password as a string'''
                    import base64
                    encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
                    authheader = "Basic %s" % encodeuser
                    request.add_header("Authorization", authheader)

                if ud.user and ud.pswd:
                    add_basic_auth(ud.user + ':' + ud.pswd, r)

                try:
                    import netrc
                    n = netrc.netrc()
                    login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
                    add_basic_auth("%s:%s" % (login, password), r)
                except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                    pass
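
                # An illustrative ~/.netrc entry that would be picked up here:
                #   machine example.com login myuser password mypass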

                with opener.open(r) as response:
                    pass
            except (urllib.error.URLError, ConnectionResetError) as e:
                if try_again:
                    logger.debug2("checkstatus: trying again")
                    return self.checkstatus(fetch, ud, d, False)
                else:
                    # debug for now to avoid spamming the logs in e.g. remote sstate searches
                    logger.debug2("checkstatus() urlopen failed: %s" % e)
                    return False

        return True

    def _parse_path(self, regex, s):
        """
        Find and group name, version and archive type in the given string s
        """

        m = regex.search(s)
        if m:
            pname = ''
            pver = ''
            ptype = ''

            mdict = m.groupdict()
            if 'name' in mdict.keys():
                pname = mdict['name']
            if 'pver' in mdict.keys():
                pver = mdict['pver']
            if 'type' in mdict.keys():
                ptype = mdict['type']

            bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))

            return (pname, pver, ptype)

        return None

    def _modelate_version(self, version):
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]

        version = re.sub('-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.1000.', version)
        version = re.sub('(beta)+', '.100.', version)
        version = re.sub('(alpha)+', '.10.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version
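
    # Illustrative mappings (pre-release tags become numeric components that
    # bb.utils.vercmp() can order):
    #   "1.2_rc3"    -> "1.2..1000.3"
    #   "v0.9-beta1" -> "0.9..100.1"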

    def _vercmp(self, old, new):
        """
        Check whether 'new' is a newer version than 'old'. We use the existing
        vercmp() for this. PE is cleared in the comparison as it's not relevant
        here, and PR is cleared too for simplicity, as it's difficult to
        extract from the various upstream formats.
        """

        (oldpn, oldpv, oldsuffix) = old
        (newpn, newpv, newsuffix) = new

        # Check for a new suffix type that we have never heard of before
        if newsuffix:
            m = self.suffix_regex_comp.search(newsuffix)
            if not m:
                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                return False

        # Not our package so ignore it
        if oldpn != newpn:
            return False

        oldpv = self._modelate_version(oldpv)
        newpv = self._modelate_version(newpv)

        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))
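
    # e.g. self._vercmp(("foo", "1.0", ""), ("foo", "1.1", "")) returns a
    # negative value, indicating that 1.1 is newer than 1.0.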

    def _fetch_index(self, uri, ud, d):
        """
        Run fetch checkstatus to get directory information
        """
        with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
            fetchcmd = self.basecmd
            fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
            try:
                self._runwget(ud, d, fetchcmd, True, workdir=workdir)
                fetchresult = f.read()
            except bb.fetch2.BBFetchException:
                fetchresult = ""

        return fetchresult

    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
        """
        Return the latest version of a package found in a given directory path.
        On error, or if no version is found, return "".
        """
        valid = 0
        version = ['', '', '']

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return ""

        for line in soup.find_all('a', href=True):
            bb.debug(3, "line['href'] = '%s'" % (line['href']))
            bb.debug(3, "line = '%s'" % (str(line)))

            newver = self._parse_path(package_regex, line['href'])
            if not newver:
                newver = self._parse_path(package_regex, str(line))

            if newver:
                bb.debug(3, "Upstream version found: %s" % newver[1])
                if valid == 0:
                    version = newver
                    valid = 1
                elif self._vercmp(version, newver) < 0:
                    version = newver

        pupver = re.sub('_', '.', version[1])

        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
                (package, pupver or "N/A", current_version[1]))

        if valid:
            return pupver

        return ""

    def _check_latest_version_by_dir(self, dirver, package, package_regex, current_version, ud, d):
        """
        Scan every directory in order to get the upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group('ver')
        else:
            version_dir[1] = dirver

        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                sver = s.group('ver')

                # When a prefix is part of the version directory, make sure
                # that only the version directory is used, so strip any
                # preceding directories.
                #
                # Example: pfx = '/dir1/dir2/v' and version = '2.5'; the
                # expected result is v2.5.
                spfx = s.group('pfx').split('/')[-1]

                version_dir_new = ['', sver, '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = spfx + sver
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                        ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]

    def _init_regexes(self, package, ud, d):
        """
        Match as many patterns as possible such as:
                gnome-common-2.20.0.tar.gz (most common format)
                gtk+-2.90.1.tar.gz
                xf86-input-synaptics-12.6.9.tar.gz
                dri2proto-2.3.tar.gz
                blktool_4.orig.tar.gz
                libid3tag-0.15.1b.tar.gz
                unzip552.tar.gz
                icu4c-3_6-src.tgz
                genext2fs_1.3.orig.tar.gz
                gst-fluendo-mp3
        """
        # match most patterns which use "-" as the separator before the version digits
        pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
        # a loose pattern such as for unzip552.tar.gz
        pn_prefix2 = r"[a-zA-Z]+"
        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        pn_prefix3 = r"[0-9]+[-]?[a-zA-Z]+"
        # Save the Package Name (pn) Regex for use later
        pn_regex = r"(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"

        # match arch
        parch_regex = "-source|_all_"

        # The src.rpm extension was added only for rpm packages. It can be removed
        # if rpm packages will always be considered as having to be manually upgraded
        psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

        # match name, version and archive type of a package
        package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
                                                    % (pn_regex, pver_regex, parch_regex, psuffix_regex))
        self.suffix_regex_comp = re.compile(psuffix_regex)
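
        # e.g. "gnome-common-2.20.0.tar.gz" parses via package_regex_comp as
        # name="gnome-common-", pver="2.20.0", type="tar.gz".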

        # compile regex, can be specific to a package or a generic regex
        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX')
        if pn_regex:
            package_custom_regex_comp = re.compile(pn_regex)
        else:
            version = self._parse_path(package_regex_comp, package)
            if version:
                package_custom_regex_comp = re.compile(
                    r"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
            else:
                package_custom_regex_comp = None

        return package_custom_regex_comp

    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version.

        Sanity checks ensure that the name and type stay the same.
        """
        package = ud.path.split("/")[-1]
        current_version = ['', d.getVar('PV'), '']

        # it's possible to have no version in the package name, such as spectrum-fw
        if not re.search(r"\d+", package):
            current_version[1] = re.sub('_', '.', current_version[1])
            current_version[1] = re.sub('-', '.', current_version[1])
            return (current_version[1], '')

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s doesn't match pattern" % (package))
            return ('', '')
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("UPSTREAM_CHECK_URI")
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches in folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.search(path)
            if m:
                pn = d.getVar('PN')
                dirver = m.group('dirver')

                dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return (self._check_latest_version_by_dir(dirver,
                        package, package_regex, current_version, ud, d), '')

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex,
                current_version, ud, d), '')
653