# Script utility functions
#
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import argparse
import errno
import glob
import importlib
import importlib.util
import logging
import os
import random
import shlex
import shutil
import string
import subprocess
import sys
import tempfile
import threading
from importlib import machinery


class KeepAliveStreamHandler(logging.StreamHandler):
    """StreamHandler that emits a periodic "Keepalive message" record when
    nothing has been logged for a while, so that watchers (e.g. CI systems)
    do not assume a long-running quiet operation has hung."""

    def __init__(self, keepalive=True, **kwargs):
        """
        keepalive: True for the default interval, or a number giving the
                   interval (passed directly to Condition.wait()).
        """
        super().__init__(**kwargs)
        if keepalive is True:
            keepalive = 5000  # default timeout
        self._timeout = threading.Condition()
        self._stop = False

        # Background thread waits on the condition; if the wait times out
        # (i.e. no record was emitted within the interval) emit a keepalive
        # message.
        def thread():
            while not self._stop:
                with self._timeout:
                    if not self._timeout.wait(keepalive):
                        self.emit(logging.LogRecord("keepalive", logging.INFO,
                                                    None, None, "Keepalive message", None, None))

        self._thread = threading.Thread(target=thread, daemon=True)
        self._thread.start()

    def close(self):
        # mark the thread to stop and notify it
        self._stop = True
        with self._timeout:
            self._timeout.notify()
        # wait for it to join
        self._thread.join()
        super().close()

    def emit(self, record):
        super().emit(record)
        # trigger timer reset
        with self._timeout:
            self._timeout.notify()


def logger_create(name, stream=None, keepalive=None):
    """Create a logger with a plain "LEVEL: message" formatter.

    If keepalive is not None a KeepAliveStreamHandler is used so that a
    keepalive message is emitted during long silent periods.
    """
    logger = logging.getLogger(name)
    if keepalive is not None:
        loggerhandler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    else:
        loggerhandler = logging.StreamHandler(stream=stream)
    loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    logger.addHandler(loggerhandler)
    logger.setLevel(logging.INFO)
    return logger


def logger_setup_color(logger, color='auto'):
    """Enable colored output on any of the logger's stream handlers that use
    bitbake's BBLogFormatter. color is 'always', 'never' or 'auto' (color
    only when the handler's stream is a tty)."""
    from bb.msg import BBLogFormatter

    for handler in logger.handlers:
        if (isinstance(handler, logging.StreamHandler) and
                isinstance(handler.formatter, BBLogFormatter)):
            if color == 'always' or (color == 'auto' and handler.stream.isatty()):
                handler.formatter.enable_color()


def load_plugins(logger, plugins, pluginpath):
    """Load all *.py plugin modules found in pluginpath into the plugins
    list, skipping __init__ and any plugin whose name is already present.
    Each loaded plugin's plugin_init(plugins) hook is called if defined."""

    def load_plugin(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            # spec.loader.load_module() is deprecated since Python 3.4 and
            # was removed in 3.12; use the importlib.util replacement.
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    def plugin_name(filename):
        return os.path.splitext(os.path.basename(filename))[0]

    known_plugins = [plugin_name(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for fn in glob.glob(os.path.join(pluginpath, '*.py')):
        name = plugin_name(fn)
        if name != '__init__' and name not in known_plugins:
            plugin = load_plugin(name)
            # Guard against a missing spec: appending None here would break
            # the known_plugins computation on a subsequent call.
            if plugin:
                if hasattr(plugin, 'plugin_init'):
                    plugin.plugin_init(plugins)
                plugins.append(plugin)


def git_convert_standalone_clone(repodir):
    """If specified directory is a git repository, ensure it's a standalone clone"""
    import bb.process
    if os.path.exists(os.path.join(repodir, '.git')):
        alternatesfile = os.path.join(repodir, '.git', 'objects', 'info', 'alternates')
        if os.path.exists(alternatesfile):
            # This will have been cloned with -s, so we need to convert it so none
            # of the contents is shared
            bb.process.run('git repack -a', cwd=repodir)
            os.remove(alternatesfile)


def _get_temp_recipe_dir(d):
    # This is a little bit hacky but we need to find a place where we can put
    # the recipe so that bitbake can find it. We're going to delete it at the
    # end so it doesn't really matter where we put it.
    bbfiles = d.getVar('BBFILES').split()
    fetchrecipedir = None
    for pth in bbfiles:
        if pth.endswith('.bb'):
            pthdir = os.path.dirname(pth)
            # Only use a location whose grandparent directory is writable
            if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
                fetchrecipedir = pthdir.replace('*', 'recipetool')
                if pthdir.endswith('workspace/recipes/*'):
                    # Prefer the workspace
                    break
    return fetchrecipedir


class FetchUrlFailure(Exception):
    """Raised when fetch_url()'s fetch + unpack build fails."""
    def __init__(self, url):
        self.url = url
    def __str__(self):
        return "Failed to fetch URL %s" % self.url


def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
    any dependencies that need to be satisfied in order to support the fetch
    operation will be taken care of

    Returns a (checksums, tmpdir) tuple; tmpdir is None unless preserve_tmp
    is set. Raises FetchUrlFailure if the fetch/unpack build fails.
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less normal paths
            # for do_fetch and do_unpack
            # I'd use tempfile functions here but underscores can be produced by that and those
            # aren't allowed in recipe file names except to separate the version
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                # Set S out of the way so it doesn't get created under the workdir
                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched. Hence, we disable them by default.
                    # However, we provide an option for users to enable it.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            def eventhandler(event):
                # Collect checksums reported by the fetcher so callers can
                # write them into the final recipe
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    # The variable may be unset or the directory may never
                    # have been created; don't let cleanup blow up on that
                    if path and os.path.exists(path):
                        shutil.rmtree(path)
        finally:
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                # Another temporary recipe may still be in the directory
                if e.errno != errno.ENOTEMPTY:
                    raise

        bb.utils.mkdirhier(destdir)
        for fn in os.listdir(tmpworkdir):
            shutil.move(os.path.join(tmpworkdir, fn), destdir)

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            tmpdir = None

    return checksums, tmpdir


def run_editor(fn, logger=None):
    """Open the user's configured editor ($VISUAL, then $EDITOR, defaulting
    to vi) on a file name or list of file names. Returns the editor's exit
    status (0 on success, 1 if execution failed)."""
    if isinstance(fn, str):
        files = [fn]
    else:
        files = fn

    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        return subprocess.check_call(shlex.split(editor) + files)
    except subprocess.CalledProcessError as exc:
        # logger defaults to None; don't crash while reporting the failure
        if logger:
            logger.error("Execution of '%s' failed: %s" % (editor, exc))
        return 1


def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so
    NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
    """
    if not param:
        return False
    elif '://' in param:
        return True
    elif param.startswith('git@') or ('@' in param and param.endswith('.git')):
        return True
    return False


def filter_src_subdirs(pth):
    """
    Filter out subdirectories of initial unpacked source trees that we do not care about.
    Used by devtool and recipetool.
    """
    dirlist = os.listdir(pth)
    filterout = ['git.indirectionsymlink', 'source-date-epoch']
    dirlist = [x for x in dirlist if x not in filterout]
    return dirlist