1# Script utility functions
2#
3# Copyright (C) 2014 Intel Corporation
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import glob
9import logging
10import os
11import random
12import shlex
13import shutil
14import string
15import subprocess
16import sys
17import tempfile
18import threading
19import importlib
20import importlib.machinery
21import importlib.util
22
23class KeepAliveStreamHandler(logging.StreamHandler):
24    def __init__(self, keepalive=True, **kwargs):
25        super().__init__(**kwargs)
26        if keepalive is True:
27            keepalive = 5000  # default timeout
28        self._timeout = threading.Condition()
29        self._stop = False
30
31        # background thread waits on condition, if the condition does not
32        # happen emit a keep alive message
33        def thread():
34            while not self._stop:
35                with self._timeout:
36                    if not self._timeout.wait(keepalive):
37                        self.emit(logging.LogRecord("keepalive", logging.INFO,
38                                                    None, None, "Keepalive message", None, None))
39
40        self._thread = threading.Thread(target=thread, daemon=True)
41        self._thread.start()
42
43    def close(self):
44        # mark the thread to stop and notify it
45        self._stop = True
46        with self._timeout:
47            self._timeout.notify()
48        # wait for it to join
49        self._thread.join()
50        super().close()
51
52    def emit(self, record):
53        super().emit(record)
54        # trigger timer reset
55        with self._timeout:
56            self._timeout.notify()
57
58def logger_create(name, stream=None, keepalive=None):
59    logger = logging.getLogger(name)
60    if keepalive is not None:
61        loggerhandler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
62    else:
63        loggerhandler = logging.StreamHandler(stream=stream)
64    loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
65    logger.addHandler(loggerhandler)
66    logger.setLevel(logging.INFO)
67    return logger
68
69def logger_setup_color(logger, color='auto'):
70    from bb.msg import BBLogFormatter
71
72    for handler in logger.handlers:
73        if (isinstance(handler, logging.StreamHandler) and
74                isinstance(handler.formatter, BBLogFormatter)):
75            if color == 'always' or (color == 'auto' and handler.stream.isatty()):
76                handler.formatter.enable_color()
77
78
79def load_plugins(logger, plugins, pluginpath):
80    def load_plugin(name):
81        logger.debug('Loading plugin %s' % name)
82        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
83        if spec:
84            mod = importlib.util.module_from_spec(spec)
85            spec.loader.exec_module(mod)
86            return mod
87
88    def plugin_name(filename):
89        return os.path.splitext(os.path.basename(filename))[0]
90
91    known_plugins = [plugin_name(p.__name__) for p in plugins]
92    logger.debug('Loading plugins from %s...' % pluginpath)
93    for fn in glob.glob(os.path.join(pluginpath, '*.py')):
94        name = plugin_name(fn)
95        if name != '__init__' and name not in known_plugins:
96            plugin = load_plugin(name)
97            if hasattr(plugin, 'plugin_init'):
98                plugin.plugin_init(plugins)
99            plugins.append(plugin)
100
101
102def git_convert_standalone_clone(repodir):
103    """If specified directory is a git repository, ensure it's a standalone clone"""
104    import bb.process
105    if os.path.exists(os.path.join(repodir, '.git')):
106        alternatesfile = os.path.join(repodir, '.git', 'objects', 'info', 'alternates')
107        if os.path.exists(alternatesfile):
108            # This will have been cloned with -s, so we need to convert it so none
109            # of the contents is shared
110            bb.process.run('git repack -a', cwd=repodir)
111            os.remove(alternatesfile)
112
113def _get_temp_recipe_dir(d):
114    # This is a little bit hacky but we need to find a place where we can put
115    # the recipe so that bitbake can find it. We're going to delete it at the
116    # end so it doesn't really matter where we put it.
117    bbfiles = d.getVar('BBFILES').split()
118    fetchrecipedir = None
119    for pth in bbfiles:
120        if pth.endswith('.bb'):
121            pthdir = os.path.dirname(pth)
122            if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
123                fetchrecipedir = pthdir.replace('*', 'recipetool')
124                if pthdir.endswith('workspace/recipes/*'):
125                    # Prefer the workspace
126                    break
127    return fetchrecipedir
128
129class FetchUrlFailure(Exception):
130    def __init__(self, url):
131        self.url = url
132    def __str__(self):
133        return "Failed to fetch URL %s" % self.url
134
135def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
136    """
137    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
138    any dependencies that need to be satisfied in order to support the fetch
139    operation will be taken care of
140    """
141
142    import bb
143
144    checksums = {}
145    fetchrecipepn = None
146
147    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
148    # we may have problems with the recipe-specific sysroot population
149    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
150    bb.utils.mkdirhier(tmpparent)
151    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
152    try:
153        tmpworkdir = os.path.join(tmpdir, 'work')
154        logger.debug('fetch_url: temp dir is %s' % tmpdir)
155
156        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
157        if not fetchrecipedir:
158            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
159            sys.exit(1)
160        fetchrecipe = None
161        bb.utils.mkdirhier(fetchrecipedir)
162        try:
163            # Generate a dummy recipe so we can follow more or less normal paths
164            # for do_fetch and do_unpack
165            # I'd use tempfile functions here but underscores can be produced by that and those
166            # aren't allowed in recipe file names except to separate the version
167            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
168            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
169            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
170            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
171            with open(fetchrecipe, 'w') as f:
172                # We don't want to have to specify LIC_FILES_CHKSUM
173                f.write('LICENSE = "CLOSED"\n')
174                # We don't need the cross-compiler
175                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
176                # We don't have the checksums yet so we can't require them
177                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
178                f.write('SRC_URI = "%s"\n' % srcuri)
179                f.write('SRCREV = "%s"\n' % srcrev)
180                f.write('PV = "0.0+"\n')
181                f.write('WORKDIR = "%s"\n' % tmpworkdir)
182                # Set S out of the way so it doesn't get created under the workdir
183                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
184                if not mirrors:
185                    # We do not need PREMIRRORS since we are almost certainly
186                    # fetching new source rather than something that has already
187                    # been fetched. Hence, we disable them by default.
188                    # However, we provide an option for users to enable it.
189                    f.write('PREMIRRORS = ""\n')
190                    f.write('MIRRORS = ""\n')
191
192            logger.info('Fetching %s...' % srcuri)
193
194            # FIXME this is too noisy at the moment
195
196            # Parse recipes so our new recipe gets picked up
197            tinfoil.parse_recipes()
198
199            def eventhandler(event):
200                if isinstance(event, bb.fetch2.MissingChecksumEvent):
201                    checksums.update(event.checksums)
202                    return True
203                return False
204
205            # Run the fetch + unpack tasks
206            res = tinfoil.build_targets(fetchrecipepn,
207                                        'do_unpack',
208                                        handle_events=True,
209                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
210                                        event_callback=eventhandler)
211            if not res:
212                raise FetchUrlFailure(srcuri)
213
214            # Remove unneeded directories
215            rd = tinfoil.parse_recipe(fetchrecipepn)
216            if rd:
217                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
218                for pathvar in pathvars:
219                    path = rd.getVar(pathvar)
220                    if os.path.exists(path):
221                        shutil.rmtree(path)
222        finally:
223            if fetchrecipe:
224                try:
225                    os.remove(fetchrecipe)
226                except FileNotFoundError:
227                    pass
228            try:
229                os.rmdir(fetchrecipedir)
230            except OSError as e:
231                import errno
232                if e.errno != errno.ENOTEMPTY:
233                    raise
234
235        bb.utils.mkdirhier(destdir)
236        for fn in os.listdir(tmpworkdir):
237            shutil.move(os.path.join(tmpworkdir, fn), destdir)
238
239    finally:
240        if not preserve_tmp:
241            shutil.rmtree(tmpdir)
242            tmpdir = None
243
244    return checksums, tmpdir
245
246
247def run_editor(fn, logger=None):
248    if isinstance(fn, str):
249        files = [fn]
250    else:
251        files = fn
252
253    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
254    try:
255        #print(shlex.split(editor) + files)
256        return subprocess.check_call(shlex.split(editor) + files)
257    except subprocess.CalledProcessError as exc:
258        logger.error("Execution of '%s' failed: %s" % (editor, exc))
259        return 1
260
261def is_src_url(param):
262    """
263    Check if a parameter is a URL and return True if so
264    NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
265    """
266    if not param:
267        return False
268    elif '://' in param:
269        return True
270    elif param.startswith('git@') or ('@' in param and param.endswith('.git')):
271        return True
272    return False
273
274def filter_src_subdirs(pth):
275    """
276    Filter out subdirectories of initial unpacked source trees that we do not care about.
277    Used by devtool and recipetool.
278    """
279    dirlist = os.listdir(pth)
280    filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa']
281    dirlist = [x for x in dirlist if x not in filterout]
282    return dirlist
283