1"""
2BitBake 'Fetch' implementation for Amazon AWS S3.
3
4Class for fetching files from Amazon S3 using the AWS Command Line Interface.
5The aws tool must be correctly installed and configured prior to use.
6
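An example SRC_URI (the bucket and object key here are illustrative only):

    SRC_URI = "s3://my-bucket/path/to/file-1.0.tar.gz"

The optional 'downloadfilename' URL parameter can be used to change the name
the file is saved under locally.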
7"""
8
9# Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com>
10#
11# Based in part on bb.fetch2.wget:
12#    Copyright (C) 2003, 2004  Chris Larson
13#
14# SPDX-License-Identifier: GPL-2.0-only
15#
16# Based on functions from the base bb module, Copyright 2003 Holger Schurig
17
18import os
19import bb
20import urllib.request, urllib.parse, urllib.error
21import re
22from bb.fetch2 import FetchMethod
23from bb.fetch2 import FetchError
24from bb.fetch2 import runfetchcmd
25
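# Convert a "<value> <unit>" pair from the aws cli progress output into a byte
# count; units other than KiB/MiB/GiB are returned unchanged (as a float).
# Example (illustrative): 5.1 KiB -> 5.1 * 1024 = 5222.4.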
def convertToBytes(value, unit):
    value = float(value)
    if unit == "KiB":
        value = value * 1024.0
    elif unit == "MiB":
        value = value * 1024.0 * 1024.0
    elif unit == "GiB":
        value = value * 1024.0 * 1024.0 * 1024.0
    return value

class S3ProgressHandler(bb.progress.LineFilterProgressHandler):
    """
    Extract progress information from s3 cp output, e.g.:
    Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining
    """
    def __init__(self, d):
        super(S3ProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(0)

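    # Called for each line of "aws s3 cp" output. When a progress line is
    # recognised it is turned into a progress event and filtered out of the
    # logged output (returning False tells the base LineFilterProgressHandler
    # not to forward the line); all other lines pass through unchanged.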
    def writeline(self, line):
        percs = re.findall(r'^Completed (\d+\.?\d*) (\w+)/(\d+\.?\d*) (\w+) (\(.+\)) with\s+', line)
        if percs:
            completed = percs[-1][0]
            completedUnit = percs[-1][1]
            total = percs[-1][2]
            totalUnit = percs[-1][3]
            completed = convertToBytes(completed, completedUnit)
            total = convertToBytes(total, totalUnit)
            progress = (completed / total) * 100.0
            rate = percs[-1][4]
            self.update(progress, rate)
            return False
        return True


class S3(FetchMethod):
    """Class to fetch urls via 'aws s3'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with s3.
        """
        return ud.type in ['s3']

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))

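        # The fetch command defaults to the aws cli found on PATH; FETCHCMD_s3
        # can be set (e.g. in local.conf) to use a different binary or to pass
        # extra options. Illustrative override:
        #   FETCHCMD_s3 = "/usr/bin/env aws --profile downloads s3"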
        ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"

    def download(self, ud, d):
        """
        Fetch urls
        Assumes localpath was called first
        """

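        # Build the copy command; with the defaults above this expands to
        # something like (bucket, key and download path are illustrative):
        #   /usr/bin/env aws s3 cp s3://my-bucket/path/to/file-1.0.tar.gz ${DL_DIR}/file-1.0.tar.gz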
        cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
        bb.fetch2.check_network_access(d, cmd, ud.url)

        progresshandler = S3ProgressHandler(d)
        runfetchcmd(cmd, d, False, log=progresshandler)

        # Additional sanity checks copied from the wget class (although there
        # are no known issues that make them strictly necessary, treat the aws
        # cli tool with a little healthy suspicion).

        if not os.path.exists(ud.localpath):
            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))

        return True

    def checkstatus(self, fetch, ud, d):
        """
        Check the status of a URL
        """

        cmd = '%s ls s3://%s%s' % (ud.basecmd, ud.host, ud.path)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)

        # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
        # is not found, so check the output of the command to confirm success.
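        # A successful lookup produces a listing line along the lines of the
        # (illustrative) "2017-06-27 03:44:40      11781 foo", while a missing
        # object produces no output at all.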

        if not output:
            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))

        return True