# Script utility functions
#
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import glob
import logging
import os
import random
import shlex
import shutil
import string
import subprocess
import sys
import tempfile
import threading
import importlib
import importlib.machinery
import importlib.util

class KeepAliveStreamHandler(logging.StreamHandler):
    """StreamHandler that periodically emits a keep-alive log message if no
    other output has been produced within the configured timeout."""
    def __init__(self, keepalive=True, **kwargs):
        super().__init__(**kwargs)
        if keepalive is True:
            keepalive = 5000  # default timeout
        self._timeout = threading.Condition()
        self._stop = False

        # A background thread waits on the condition; if it is not notified
        # before the timeout expires, emit a keep-alive message
        def thread():
            while not self._stop:
                with self._timeout:
                    if not self._timeout.wait(keepalive):
                        self.emit(logging.LogRecord("keepalive", logging.INFO,
                                                    None, None, "Keepalive message", None, None))

        self._thread = threading.Thread(target=thread, daemon=True)
        self._thread.start()

    def close(self):
        # mark the thread to stop and notify it
        self._stop = True
        with self._timeout:
            self._timeout.notify()
        # wait for the thread to finish
        self._thread.join()
        super().close()

    def emit(self, record):
        super().emit(record)
        # any real log output resets the keep-alive timer
        with self._timeout:
            self._timeout.notify()

def logger_create(name, stream=None, keepalive=None):
    logger = logging.getLogger(name)
    if keepalive is not None:
        loggerhandler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    else:
        loggerhandler = logging.StreamHandler(stream=stream)
    loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    logger.addHandler(loggerhandler)
    logger.setLevel(logging.INFO)
    return logger

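# Example usage for logger_create() (an illustrative sketch only, not part of
# the original module; the tool name and timeout are arbitrary). Passing a
# keepalive value installs KeepAliveStreamHandler, which emits an INFO
# "Keepalive message" record whenever nothing has been logged within the
# timeout, which can be useful for long-running operations driven by systems
# that time out on silent output.
#
#     logger = logger_create('mytool', stream=sys.stderr, keepalive=5000)
#     logger.info('Starting long-running operation...')
#     # ... do the work, logging progress as it happens ...
#     for handler in logger.handlers:
#         handler.close()   # stops the keep-alive background thread
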
def logger_setup_color(logger, color='auto'):
    from bb.msg import BBLogFormatter

    for handler in logger.handlers:
        if (isinstance(handler, logging.StreamHandler) and
                isinstance(handler.formatter, BBLogFormatter)):
            if color == 'always' or (color == 'auto' and handler.stream.isatty()):
                handler.formatter.enable_color()


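# Example usage for logger_setup_color() (an illustrative sketch only; it
# assumes bitbake's lib/ directory is on sys.path so that bb.msg.BBLogFormatter
# can be imported). Color is only enabled on StreamHandlers whose formatter is
# a BBLogFormatter, and with color='auto' only when the handler's stream is a tty.
#
#     import bb.msg
#     logger = logger_create('mytool')
#     logger.handlers[0].setFormatter(bb.msg.BBLogFormatter("%(levelname)s: %(message)s"))
#     logger_setup_color(logger, color='auto')
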
def load_plugins(logger, plugins, pluginpath):
    """Load plugin modules from all .py files in pluginpath, skipping any that
    are already present in the plugins list, and append them to that list."""
    def load_plugin(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    def plugin_name(filename):
        return os.path.splitext(os.path.basename(filename))[0]

    known_plugins = [plugin_name(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for fn in glob.glob(os.path.join(pluginpath, '*.py')):
        name = plugin_name(fn)
        if name != '__init__' and name not in known_plugins:
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                plugin.plugin_init(plugins)
            plugins.append(plugin)


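# Example usage for load_plugins() (an illustrative sketch only; the plugin
# directory and the register_commands() hook shown here are hypothetical).
# Plugin modules may define plugin_init(plugins), which is called with the
# shared plugins list before the module is appended to it.
#
#     plugins = []
#     load_plugins(logger, plugins, os.path.join(scripts_lib_path, 'mytool', 'plugins'))
#     for plugin in plugins:
#         if hasattr(plugin, 'register_commands'):
#             plugin.register_commands(subparsers)
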
def git_convert_standalone_clone(repodir):
    """If the specified directory is a git repository, ensure it is a standalone clone"""
    import bb.process
    if os.path.exists(os.path.join(repodir, '.git')):
        alternatesfile = os.path.join(repodir, '.git', 'objects', 'info', 'alternates')
        if os.path.exists(alternatesfile):
            # The repository will have been cloned with -s, so repack it so that
            # none of its objects are shared with the source repository
            bb.process.run('git repack -a', cwd=repodir)
            os.remove(alternatesfile)

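# Example usage for git_convert_standalone_clone() (an illustrative sketch
# only; the repository paths are arbitrary). A clone made with 'git clone -s'
# borrows objects from the source repository via .git/objects/info/alternates;
# this call repacks it so it no longer depends on the original.
#
#     bb.process.run('git clone -s /path/to/original/repo %s' % repodir)
#     git_convert_standalone_clone(repodir)
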
def _get_temp_recipe_dir(d):
    # This is a little hacky, but we need to find a place to put the recipe
    # where bitbake can find it. We're going to delete it at the end so it
    # doesn't really matter exactly where we put it.
    bbfiles = d.getVar('BBFILES').split()
    fetchrecipedir = None
    for pth in bbfiles:
        if pth.endswith('.bb'):
            pthdir = os.path.dirname(pth)
            if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
                fetchrecipedir = pthdir.replace('*', 'recipetool')
                if pthdir.endswith('workspace/recipes/*'):
                    # Prefer the devtool workspace if it is available
                    break
    return fetchrecipedir

class FetchUrlFailure(Exception):
    def __init__(self, url):
        self.url = url
    def __str__(self):
        return "Failed to fetch URL %s" % self.url

def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using the normal do_fetch and do_unpack tasks, so
    that any dependencies needed to support the fetch operation are taken care
    of automatically. Returns a (checksums, tmpdir) tuple; tmpdir is None
    unless preserve_tmp is set.
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR}, otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less the normal
            # paths for do_fetch and do_unpack. We would use the tempfile
            # functions here, but they can produce underscores, which aren't
            # allowed in recipe file names except to separate the version.
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('PV = "0.0+"\n')
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                f.write('UNPACKDIR = "%s"\n' % destdir)

                # Set S out of the way so it doesn't get created under the workdir
                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched, so disable both PREMIRRORS and MIRRORS by
                    # default; the 'mirrors' argument lets callers re-enable them.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            def eventhandler(event):
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    if os.path.exists(path):
                        shutil.rmtree(path)
        finally:
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                import errno
                if e.errno != errno.ENOTEMPTY:
                    raise

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            tmpdir = None

    return checksums, tmpdir


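# Example usage for fetch_url() (an illustrative sketch only; the URL, revision
# and destination directory are arbitrary, and the caller is assumed to already
# hold a prepared bb.tinfoil.Tinfoil instance as devtool/recipetool do).
#
#     try:
#         checksums, tmpdir = fetch_url(tinfoil,
#                                       'https://example.com/foo-1.0.tar.gz',
#                                       '', '/tmp/foo-src', logger)
#     except FetchUrlFailure as exc:
#         logger.error(str(exc))
#     # 'checksums' contains any checksums reported via MissingChecksumEvent
#     # (useful for filling in SRC_URI checksums of a new recipe); 'tmpdir' is
#     # None unless preserve_tmp=True was passed.
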
def run_editor(fn, logger=None):
    if isinstance(fn, str):
        files = [fn]
    else:
        files = fn

    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        #print(shlex.split(editor) + files)
        return subprocess.check_call(shlex.split(editor) + files)
    except subprocess.CalledProcessError as exc:
        if logger:
            logger.error("Execution of '%s' failed: %s" % (editor, exc))
        return 1

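# Example usage for run_editor() (an illustrative sketch only; the file path is
# arbitrary). The editor is taken from $VISUAL, then $EDITOR, falling back to
# 'vi'; the call returns 0 on success or 1 if the editor exited with a
# non-zero status.
#
#     if run_editor('/path/to/recipes/foo_1.0.bb', logger) != 0:
#         logger.warning('Edit of recipe was not saved successfully')
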
def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so
    NOTE: be careful about changing this, as it will influence how
    devtool/recipetool command line handling works
    """
    if not param:
        return False
    elif '://' in param:
        return True
    elif param.startswith('git@') or ('@' in param and param.endswith('.git')):
        return True
    return False

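# Example behaviour of is_src_url() (illustrative values only):
#
#     is_src_url('https://example.com/foo-1.0.tar.gz')   # True  ('://' present)
#     is_src_url('git@example.com:foo/bar.git')          # True  (git@ prefix)
#     is_src_url('foo-1.0.tar.gz')                       # False (plain file name)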