# Recipe creation tool - create command plugin
#
# Copyright (C) 2014-2017 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys
import os
import argparse
import glob
import fnmatch
import re
import json
import logging
import scriptutils
from urllib.parse import urlparse, urldefrag, urlsplit
import hashlib
import bb.fetch2
logger = logging.getLogger('recipetool')

tinfoil = None
plugins = None

def log_error_cond(message, debugonly):
    if debugonly:
        logger.debug(message)
    else:
        logger.error(message)

def log_info_cond(message, debugonly):
    if debugonly:
        logger.debug(message)
    else:
        logger.info(message)

def plugin_init(pluginlist):
    # Take a reference to the list so we can use it later
    global plugins
    plugins = pluginlist

def tinfoil_init(instance):
    global tinfoil
    tinfoil = instance

class RecipeHandler(object):
    recipelibmap = {}
    recipeheadermap = {}
    recipecmakefilemap = {}
    recipebinmap = {}

    def __init__(self):
        self._devtool = False

    @staticmethod
    def load_libmap(d):
        '''Load library->recipe mapping'''
        import oe.package

        if RecipeHandler.recipelibmap:
            return
        # First build up library->package mapping
        d2 = bb.data.createCopy(d)
        d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
        shlib_providers = oe.package.read_shlib_providers(d2)
        libdir = d.getVar('libdir')
        base_libdir = d.getVar('base_libdir')
        libpaths = list(set([base_libdir, libdir]))
        libname_re = re.compile(r'^lib(.+)\.so.*$')
        pkglibmap = {}
        for lib, item in shlib_providers.items():
            for path, pkg in item.items():
                if path in libpaths:
                    res = libname_re.match(lib)
                    if res:
                        libname = res.group(1)
                        if not libname in pkglibmap:
                            pkglibmap[libname] = pkg[0]
                    else:
                        logger.debug('unable to extract library name from %s' % lib)

        # Now turn it into a library->recipe mapping
        pkgdata_dir = d.getVar('PKGDATA_DIR')
        for libname, pkg in pkglibmap.items():
            try:
                with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                    for line in f:
                        if line.startswith('PN:'):
                            RecipeHandler.recipelibmap[libname] = line.split(':', 1)[-1].strip()
                            break
            except IOError as ioe:
                if ioe.errno == 2:
                    logger.warning('unable to find a pkgdata file for package %s' % pkg)
                else:
                    raise

        # Some overrides - these should be mapped to the corresponding virtual providers
        RecipeHandler.recipelibmap['GL'] = 'virtual/libgl'
        RecipeHandler.recipelibmap['EGL'] = 'virtual/egl'
        RecipeHandler.recipelibmap['GLESv2'] = 'virtual/libgles2'

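    # Note: the map built above goes from a bare library name to the recipe
    # providing it, based on pkgdata from recipes already built in the current
    # workspace - e.g. a dependency on libz.so would typically resolve through
    # the 'z' entry to the zlib recipe (illustrative only; the actual contents
    # depend entirely on what has been built).
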
    @staticmethod
    def load_devel_filemap(d):
        '''Build up development file->recipe mapping'''
        if RecipeHandler.recipeheadermap:
            return
        pkgdata_dir = d.getVar('PKGDATA_DIR')
        includedir = d.getVar('includedir')
        cmakedir = os.path.join(d.getVar('libdir'), 'cmake')
        for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')):
            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                pn = None
                headers = []
                cmakefiles = []
                for line in f:
                    if line.startswith('PN:'):
                        pn = line.split(':', 1)[-1].strip()
                    elif line.startswith('FILES_INFO:%s:' % pkg):
                        val = line.split(': ', 1)[1].strip()
                        dictval = json.loads(val)
                        for fullpth in sorted(dictval):
                            if fullpth.startswith(includedir) and fullpth.endswith('.h'):
                                headers.append(os.path.relpath(fullpth, includedir))
                            elif fullpth.startswith(cmakedir) and fullpth.endswith('.cmake'):
                                cmakefiles.append(os.path.relpath(fullpth, cmakedir))
                if pn and headers:
                    for header in headers:
                        RecipeHandler.recipeheadermap[header] = pn
                if pn and cmakefiles:
                    for fn in cmakefiles:
                        RecipeHandler.recipecmakefilemap[fn] = pn

    @staticmethod
    def load_binmap(d):
        '''Build up native binary->recipe mapping'''
        if RecipeHandler.recipebinmap:
            return
        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
        staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE')
        build_arch = d.getVar('BUILD_ARCH')
        fileprefix = 'manifest-%s-' % build_arch
        for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)):
            with open(fn, 'r') as f:
                pn = os.path.basename(fn).rsplit('.', 1)[0][len(fileprefix):]
                for line in f:
                    if line.startswith(staging_bindir_native):
                        prog = os.path.basename(line.rstrip())
                        RecipeHandler.recipebinmap[prog] = pn

    @staticmethod
    def checkfiles(path, speclist, recursive=False, excludedirs=None):
        results = []
        if recursive:
            for root, dirs, files in os.walk(path, topdown=True):
                if excludedirs:
                    dirs[:] = [d for d in dirs if d not in excludedirs]
                for fn in files:
                    for spec in speclist:
                        if fnmatch.fnmatch(fn, spec):
                            results.append(os.path.join(root, fn))
        else:
            for spec in speclist:
                results.extend(glob.glob(os.path.join(path, spec)))
        return results

    @staticmethod
    def handle_depends(libdeps, pcdeps, deps, outlines, values, d):
        if pcdeps:
            recipemap = read_pkgconfig_provides(d)
        if libdeps:
            RecipeHandler.load_libmap(d)

        ignorelibs = ['socket']
        ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'musl', 'tar-native', 'binutils-native', 'coreutils-native']

        unmappedpc = []
        pcdeps = list(set(pcdeps))
        for pcdep in pcdeps:
            if isinstance(pcdep, str):
                recipe = recipemap.get(pcdep, None)
                if recipe:
                    deps.append(recipe)
                else:
                    if not pcdep.startswith('$'):
                        unmappedpc.append(pcdep)
            else:
                for item in pcdep:
                    recipe = recipemap.get(item, None)
                    if recipe:
                        deps.append(recipe)
                        break
                else:
                    unmappedpc.append('(%s)' % ' or '.join(pcdep))

        unmappedlibs = []
        for libdep in libdeps:
            if isinstance(libdep, tuple):
                lib, header = libdep
            else:
                lib = libdep
                header = None

            if lib in ignorelibs:
                logger.debug('Ignoring library dependency %s' % lib)
                continue

            recipe = RecipeHandler.recipelibmap.get(lib, None)
            if recipe:
                deps.append(recipe)
            elif recipe is None:
                if header:
                    RecipeHandler.load_devel_filemap(d)
                    recipe = RecipeHandler.recipeheadermap.get(header, None)
                    if recipe:
                        deps.append(recipe)
                    elif recipe is None:
                        unmappedlibs.append(lib)
                else:
                    unmappedlibs.append(lib)

        deps = set(deps).difference(set(ignoredeps))

        if unmappedpc:
            outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmappedpc))
            outlines.append('# (this is based on recipes that have previously been built and packaged)')

        if unmappedlibs:
            outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
            outlines.append('# (this is based on recipes that have previously been built and packaged)')

        if deps:
            values['DEPENDS'] = ' '.join(deps)

    @staticmethod
    def genfunction(outlines, funcname, content, python=False, forcespace=False):
        if python:
            prefix = 'python '
        else:
            prefix = ''
        outlines.append('%s%s () {' % (prefix, funcname))
        if python or forcespace:
            indent = '    '
        else:
            indent = '\t'
        addnoop = not python
        for line in content:
            outlines.append('%s%s' % (indent, line))
            if addnoop:
                strippedline = line.lstrip()
                if strippedline and not strippedline.startswith('#'):
                    addnoop = False
        if addnoop:
            # Without this there'll be a syntax error
            outlines.append('%s:' % indent)
        outlines.append('}')
        outlines.append('')

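    # For illustration, a call such as
    #   RecipeHandler.genfunction(outlines, 'do_install', ['install -d ${D}${bindir}'])
    # appends roughly:
    #   do_install () {
    #           install -d ${D}${bindir}
    #   }
    # (tab-indented body); the ':' no-op is only emitted when the content
    # contains no executable lines.
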
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        return False


def validate_pv(pv):
    if not pv or '_version' in pv.lower() or pv[0] not in '0123456789':
        return False
    return True

def determine_from_filename(srcfile):
    """Determine name and version from a filename"""
    if is_package(srcfile):
        # Force getting the value from the package metadata
        return None, None

    if '.tar.' in srcfile:
        namepart = srcfile.split('.tar.')[0]
    else:
        namepart = os.path.splitext(srcfile)[0]
    namepart = namepart.lower().replace('_', '-')
    if namepart.endswith('.src'):
        namepart = namepart[:-4]
    if namepart.endswith('.orig'):
        namepart = namepart[:-5]
    splitval = namepart.split('-')
    logger.debug('determine_from_filename: split name %s into: %s' % (srcfile, splitval))

    ver_re = re.compile('^v?[0-9]')

    pv = None
    pn = None
    if len(splitval) == 1:
        # Try to split the version out if there is no separator (or a .)
        res = re.match('^([^0-9]+)([0-9.]+.*)$', namepart)
        if res:
            if len(res.group(1)) > 1 and len(res.group(2)) > 1:
                pn = res.group(1).rstrip('.')
                pv = res.group(2)
        else:
            pn = namepart
    else:
        if splitval[-1] in ['source', 'src']:
            splitval.pop()
        if len(splitval) > 2 and re.match('^(alpha|beta|stable|release|rc[0-9]|pre[0-9]|p[0-9]|[0-9]{8})', splitval[-1]) and ver_re.match(splitval[-2]):
            pv = '-'.join(splitval[-2:])
            if pv.endswith('-release'):
                pv = pv[:-8]
            splitval = splitval[:-2]
        elif ver_re.match(splitval[-1]):
            pv = splitval.pop()
        pn = '-'.join(splitval)
    if pv and pv.startswith('v'):
        pv = pv[1:]
    logger.debug('determine_from_filename: name = "%s" version = "%s"' % (pn, pv))
    return (pn, pv)

def determine_from_url(srcuri):
    """Determine name and version from a URL"""
    pn = None
    pv = None
    parseres = urlparse(srcuri.lower().split(';', 1)[0])
    if parseres.path:
        if 'github.com' in parseres.netloc:
            res = re.search(r'.*/(.*?)/archive/(.*)-final\.(tar|zip)', parseres.path)
            if res:
                pn = res.group(1).strip().replace('_', '-')
                pv = res.group(2).strip().replace('_', '.')
            else:
                res = re.search(r'.*/(.*?)/archive/v?(.*)\.(tar|zip)', parseres.path)
                if res:
                    pn = res.group(1).strip().replace('_', '-')
                    pv = res.group(2).strip().replace('_', '.')
        elif 'bitbucket.org' in parseres.netloc:
            res = re.search(r'.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9a-zA-Z_.]*)\.(tar|zip)', parseres.path)
            if res:
                pn = res.group(1).strip().replace('_', '-')
                pv = res.group(2).strip().replace('_', '.')

    if not pn and not pv:
        if parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']:
            srcfile = os.path.basename(parseres.path.rstrip('/'))
            pn, pv = determine_from_filename(srcfile)
        elif parseres.scheme in ['git', 'gitsm']:
            pn = os.path.basename(parseres.path.rstrip('/')).lower().replace('_', '-')
            if pn.endswith('.git'):
                pn = pn[:-4]

    logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv))
    return (pn, pv)

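# Illustrative examples of the name/version heuristics above (results depend
# entirely on the regular expressions and are not guaranteed for every input):
#   determine_from_filename('tiff-4.3.0.tar.gz')        -> ('tiff', '4.3.0')
#   determine_from_filename('somelib_1.2.orig.tar.xz')  -> ('somelib', '1.2')
#   determine_from_url('https://github.com/example/foo/archive/v1.2.3.tar.gz')
#                                                        -> ('foo', '1.2.3')
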
def supports_srcrev(uri):
    localdata = bb.data.createCopy(tinfoil.config_data)
    # This is a bit sad, but if you don't have this set there can be some
    # odd interactions with the urldata cache which lead to errors
    localdata.setVar('SRCREV', '${AUTOREV}')
    try:
        fetcher = bb.fetch2.Fetch([uri], localdata)
        urldata = fetcher.ud
        for u in urldata:
            if urldata[u].method.supports_srcrev():
                return True
    except bb.fetch2.FetchError as e:
        logger.debug('FetchError in supports_srcrev: %s' % str(e))
        # Fall back to basic check
        if uri.startswith(('git://', 'gitsm://')):
            return True
    return False

def reformat_git_uri(uri):
    '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
    checkuri = uri.split(';', 1)[0]
    if checkuri.endswith('.git') or '/git/' in checkuri or re.match(r'https?://git(hub|lab)\.com/[^/]+/[^/]+/?$', checkuri):
        # Append the scheme if it is missing
        if not '://' in uri:
            uri = 'git://' + uri
        scheme, host, path, user, pswd, parms = bb.fetch2.decodeurl(uri)
        # Detection mechanism: this is required because certain URLs are formatted
        # with ":" rather than "/", which causes decodeurl to fail to get the
        # right host and path
        if len(host.split(':')) > 1:
            splitslash = host.split(':')
            # Port number should not be split from host
            if not re.match('^[0-9]+$', splitslash[1]):
                host = splitslash[0]
                path = '/' + splitslash[1] + path
        # Algorithm:
        # if a user is defined and no protocol is set, append protocol=ssh; otherwise honour the user-defined protocol
        # if no user & password is defined, check the scheme type and append the protocol matching the scheme
        # finally, if a protocol is already set or the URL is otherwise well-formed, do nothing and rejoin everything back to normal
        # Need to repackage the arguments for encodeurl, the format is: (scheme, host, path, user, password, OrderedDict([('key', 'value')]))
        if user:
            if not 'protocol' in parms:
                parms.update({('protocol', 'ssh')})
        elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms):
            parms.update({('protocol', scheme)})
        # Always append 'git://'
        fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
        return fUrl
    else:
        return uri

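# For example, reformat_git_uri('https://github.com/example/project.git') returns
# roughly 'git://github.com/example/project.git;protocol=https', while a
# user@host style URI gains protocol=ssh instead; URIs that do not look like git
# repositories are returned unchanged.
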
def is_package(url):
    '''Check if a URL points to a package'''
    checkurl = url.split(';', 1)[0]
    if checkurl.endswith(('.deb', '.ipk', '.rpm', '.srpm')):
        return True
    return False

def create_recipe(args):
    import bb.process
    import tempfile
    import shutil
    import oe.recipeutils

    pkgarch = ""
    if args.machine:
        pkgarch = "${MACHINE_ARCH}"

    extravalues = {}
    checksums = {}
    tempsrc = ''
    source = args.source
    srcsubdir = ''
    srcrev = '${AUTOREV}'
    srcbranch = ''
    scheme = ''
    storeTagName = ''
    pv_srcpv = False

    handled = []
    classes = []

    # Find all plugins that want to register handlers
    logger.debug('Loading recipe handlers')
    raw_handlers = []
    for plugin in plugins:
        if hasattr(plugin, 'register_recipe_handlers'):
            plugin.register_recipe_handlers(raw_handlers)
    # Sort handlers by priority
    handlers = []
    for i, handler in enumerate(raw_handlers):
        if isinstance(handler, tuple):
            handlers.append((handler[0], handler[1], i))
        else:
            handlers.append((handler, 0, i))
    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
    for handler, priority, _ in handlers:
        logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
        setattr(handler, '_devtool', args.devtool)
    handlers = [item[0] for item in handlers]

    fetchuri = None
    for handler in handlers:
        if hasattr(handler, 'process_url'):
            ret = handler.process_url(args, classes, handled, extravalues)
            if 'url' in handled and ret:
                fetchuri = ret
                break

    if os.path.isfile(source):
        source = 'file://%s' % os.path.abspath(source)

    if scriptutils.is_src_url(source):
        # Warn about github archive URLs
        if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
            logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
        # Fetch a URL
        if not fetchuri:
            fetchuri = reformat_git_uri(urldefrag(source)[0])
        if args.binary:
            # Assume the archive contains the directory structure verbatim
            # so we need to extract to a subdirectory
            fetchuri += ';subdir=${BPN}'
        srcuri = fetchuri
        rev_re = re.compile(';rev=([^;]+)')
        res = rev_re.search(srcuri)
        if res:
            if args.srcrev:
                logger.error('rev= parameter and -S/--srcrev option cannot both be specified - use one or the other')
                sys.exit(1)
            if args.autorev:
                logger.error('rev= parameter and -a/--autorev option cannot both be specified - use one or the other')
                sys.exit(1)
            srcrev = res.group(1)
            srcuri = rev_re.sub('', srcuri)
        elif args.srcrev:
            srcrev = args.srcrev

        # Check whether the user has provided any branch info in fetchuri.
        # If so, skip all branch checking and honour the user's input.
        scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(fetchuri)
        srcbranch = params.get('branch')
        if args.srcbranch:
            if srcbranch:
                logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other')
                sys.exit(1)
            srcbranch = args.srcbranch
            params['branch'] = srcbranch
        nobranch = params.get('nobranch')
        if nobranch and srcbranch:
            logger.error('nobranch= cannot be used if you specify a branch')
            sys.exit(1)
        tag = params.get('tag')
        if not srcbranch and not nobranch and srcrev != '${AUTOREV}':
            # Append nobranch=1 in the following conditions:
            # 1. User did not set 'branch=' in srcuri, and
            # 2. User did not set 'nobranch=1' in srcuri, and
            # 3. Source revision is not '${AUTOREV}'
            params['nobranch'] = '1'
        if tag:
            # Keep a copy of the tag, append nobranch=1 and then remove the tag from the URL.
            # The bitbake fetcher is unable to fetch when ${AUTOREV} and a tag are set at the same time.
            storeTagName = params['tag']
            params['nobranch'] = '1'
            del params['tag']
        # Assume 'master' branch if not set
        if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
            params['branch'] = 'master'
        fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))

        tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
        bb.utils.mkdirhier(tmpparent)
        tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
        srctree = os.path.join(tempsrc, 'source')

        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, fetchuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp)
        except scriptutils.FetchUrlFailure as e:
            logger.error(str(e))
            sys.exit(1)

        if ftmpdir and args.keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        dirlist = scriptutils.filter_src_subdirs(srctree)
        logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
        if len(dirlist) == 1:
            singleitem = os.path.join(srctree, dirlist[0])
            if os.path.isdir(singleitem):
                # We unpacked a single directory, so we should use that
                srcsubdir = dirlist[0]
                srctree = os.path.join(srctree, srcsubdir)
            else:
                check_single_file(dirlist[0], fetchuri)
        elif len(dirlist) == 0:
            if '/' in fetchuri:
                fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1])
                if os.path.isfile(fn):
                    check_single_file(fn, fetchuri)
            # If we've got to here then there's no source so we might as well give up
            logger.error('URL %s resulted in an empty source tree' % fetchuri)
            sys.exit(1)

        # We need this checking mechanism so that the recipe created by recipetool
        # and devtool can be parsed and built by bitbake.
        # If no branch name was provided, check for the branch name using the provided SRCREV.
        if not srcbranch and not nobranch and srcrev and (srcrev != '${AUTOREV}') and scheme in ['git', 'gitsm']:
            try:
                cmd = 'git branch -r --contains'
                check_branch, check_branch_err = bb.process.run('%s %s' % (cmd, srcrev), cwd=srctree)
            except bb.process.ExecutionError as err:
                logger.error(str(err))
                sys.exit(1)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if 'master' in get_branch:
                # Even if get_branch contains multiple branches, if 'master' is one
                # of them we should default to 'master'
                srcbranch = 'master'
            elif len(get_branch) == 1:
                # If 'master' isn't in get_branch and get_branch contains only ONE branch, store the result in 'srcbranch'
                srcbranch = get_branch[0]
            else:
                # If get_branch contains more than one branch, display an error and exit.
                mbrch = '\n ' + '\n '.join(get_branch)
                logger.error('Revision %s was found on multiple branches: %s\nPlease provide the correct branch with -B/--srcbranch' % (srcrev, mbrch))
                sys.exit(1)

        # Since we might have a value in srcbranch, we need to
        # reconstruct the srcuri to include 'branch' in its params.
        scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
        if scheme in ['git', 'gitsm']:
            params['branch'] = srcbranch or 'master'

        if storeTagName and scheme in ['git', 'gitsm']:
            # Determine the srcrev from the tag and check that the tag is valid
            cmd = ('git rev-parse --verify %s' % (storeTagName))
            try:
                check_tag, check_tag_err = bb.process.run('%s' % cmd, cwd=srctree)
                srcrev = check_tag.split()[0]
            except bb.process.ExecutionError as err:
                logger.error(str(err))
                logger.error("The provided tag name may be incorrect")
                sys.exit(1)
            # Drop the tag from srcuri as it conflicts with SRCREV during recipe parsing.
            del params['tag']
        srcuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))

        if os.path.exists(os.path.join(srctree, '.gitmodules')) and srcuri.startswith('git://'):
            srcuri = 'gitsm://' + srcuri[6:]
            logger.info('Fetching submodules...')
            bb.process.run('git submodule update --init --recursive', cwd=srctree)

        if is_package(fetchuri):
            localdata = bb.data.createCopy(tinfoil.config_data)
            pkgfile = bb.fetch2.localpath(fetchuri, localdata)
            if pkgfile:
                tmpfdir = tempfile.mkdtemp(prefix='recipetool-')
                try:
                    if pkgfile.endswith(('.deb', '.ipk')):
                        stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir)
                        stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir)
                        values = convert_debian(tmpfdir)
                        extravalues.update(values)
                    elif pkgfile.endswith(('.rpm', '.srpm')):
                        stdout, _ = bb.process.run('rpm -qp --xml %s > pkginfo.xml' % pkgfile, cwd=tmpfdir)
                        values = convert_rpm_xml(os.path.join(tmpfdir, 'pkginfo.xml'))
                        extravalues.update(values)
                finally:
                    shutil.rmtree(tmpfdir)
    else:
        # Assume we're pointing to an existing source tree
        if args.extract_to:
            logger.error('--extract-to cannot be specified if source is a directory')
            sys.exit(1)
        if not os.path.isdir(source):
            logger.error('Invalid source directory %s' % source)
            sys.exit(1)
        srctree = source
        srcuri = ''
        if os.path.exists(os.path.join(srctree, '.git')):
            # Try to get upstream repo location from origin remote
            try:
                stdout, _ = bb.process.run('git remote -v', cwd=srctree, shell=True)
            except bb.process.ExecutionError as e:
                stdout = None
            if stdout:
                for line in stdout.splitlines():
                    splitline = line.split()
                    if len(splitline) > 1:
                        if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
                            srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
                            srcsubdir = 'git'
                            break

    if args.src_subdir:
        srcsubdir = os.path.join(srcsubdir, args.src_subdir)
        srctree_use = os.path.abspath(os.path.join(srctree, args.src_subdir))
    else:
        srctree_use = os.path.abspath(srctree)

    if args.outfile and os.path.isdir(args.outfile):
        outfile = None
        outdir = args.outfile
    else:
        outfile = args.outfile
        outdir = None
    if outfile and outfile != '-':
        if os.path.exists(outfile):
            logger.error('Output file %s already exists' % outfile)
            sys.exit(1)

    lines_before = []
    lines_after = []

    lines_before.append('# Recipe created by %s' % os.path.basename(sys.argv[0]))
    lines_before.append('# This is the basis of a recipe and may need further editing in order to be fully functional.')
    lines_before.append('# (Feel free to remove these comments when editing.)')
    # We need a blank line here so that patch_recipe_lines can rewind before the LICENSE comments
    lines_before.append('')

    # We'll come back and replace this later in handle_license_vars()
    lines_before.append('##LICENSE_PLACEHOLDER##')


    # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
    pn = None
    pv = None
    if outfile:
        recipefn = os.path.splitext(os.path.basename(outfile))[0]
        fnsplit = recipefn.split('_')
        if len(fnsplit) > 1:
            pn = fnsplit[0]
            pv = fnsplit[1]
        else:
            pn = recipefn

    if args.version:
        pv = args.version

    if args.name:
        pn = args.name
        if args.name.endswith('-native'):
            if args.also_native:
                logger.error('--also-native cannot be specified for a recipe named *-native (*-native denotes a recipe that is already only for native) - either remove the -native suffix from the name or drop --also-native')
                sys.exit(1)
            classes.append('native')
        elif args.name.startswith('nativesdk-'):
            if args.also_native:
                logger.error('--also-native cannot be specified for a recipe named nativesdk-* (nativesdk-* denotes a recipe that is already only for nativesdk)')
                sys.exit(1)
            classes.append('nativesdk')

    if pv and pv not in 'git svn hg'.split():
        realpv = pv
    else:
        realpv = None

    if not srcuri:
        lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
    lines_before.append('SRC_URI = "%s"' % srcuri)
    shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
    for key, value in sorted(checksums.items()):
        if key in shown_checksums:
            lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
    if srcuri and supports_srcrev(srcuri):
        lines_before.append('')
        lines_before.append('# Modify these as desired')
        # Note: we have code to replace realpv further down if it gets set to some other value
        scheme, _, _, _, _, _ = bb.fetch2.decodeurl(srcuri)
        if scheme in ['git', 'gitsm']:
            srcpvprefix = 'git'
        elif scheme == 'svn':
            srcpvprefix = 'svnr'
        else:
            srcpvprefix = scheme
        lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
        pv_srcpv = True
        if not args.autorev and srcrev == '${AUTOREV}':
            if os.path.exists(os.path.join(srctree, '.git')):
                (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
                srcrev = stdout.rstrip()
        lines_before.append('SRCREV = "%s"' % srcrev)
    if args.provides:
        lines_before.append('PROVIDES = "%s"' % args.provides)
    lines_before.append('')

    if srcsubdir and not args.binary:
        # (for binary packages we explicitly specify subdir= when fetching to
        # match the default value of S, so we don't need to set it in that case)
        lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir)
        lines_before.append('')

    if pkgarch:
        lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch)
        lines_after.append('')

    if args.binary:
        lines_after.append('INSANE_SKIP:${PN} += "already-stripped"')
        lines_after.append('')

    if args.npm_dev:
        extravalues['NPM_INSTALL_DEV'] = 1

    # Apply the handlers
    if args.binary:
        classes.append('bin_package')
        handled.append('buildsystem')

    for handler in handlers:
        handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)

    # native and nativesdk classes are special and must be inherited last
    # If present, put them at the end of the classes list
    classes.sort(key=lambda c: c in ("native", "nativesdk"))

    extrafiles = extravalues.pop('extrafiles', {})
    extra_pn = extravalues.pop('PN', None)
    extra_pv = extravalues.pop('PV', None)

    if extra_pv and not realpv:
        realpv = extra_pv
        if not validate_pv(realpv):
            realpv = None
        else:
            realpv = realpv.lower().split()[0]
            if '_' in realpv:
                realpv = realpv.replace('_', '-')
    if extra_pn and not pn:
        pn = extra_pn
        if pn.startswith('GNU '):
            pn = pn[4:]
        if ' ' in pn:
            # Probably a descriptive identifier rather than a proper name
            pn = None
        else:
            pn = pn.lower()
            if '_' in pn:
                pn = pn.replace('_', '-')

    if srcuri and not realpv or not pn:
        name_pn, name_pv = determine_from_url(srcuri)
        if name_pn and not pn:
            pn = name_pn
        if name_pv and not realpv:
            realpv = name_pv

    licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data)

    if not outfile:
        if not pn:
            log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool)
            # devtool looks for this specific exit code, so don't change it
            sys.exit(15)
        else:
            if srcuri and srcuri.startswith(('gitsm://', 'git://', 'hg://', 'svn://')):
                suffix = srcuri.split(':', 1)[0]
                if suffix == 'gitsm':
                    suffix = 'git'
                outfile = '%s_%s.bb' % (pn, suffix)
            elif realpv:
                outfile = '%s_%s.bb' % (pn, realpv)
            else:
                outfile = '%s.bb' % pn
            if outdir:
                outfile = os.path.join(outdir, outfile)
            # We need to check this again
            if os.path.exists(outfile):
                logger.error('Output file %s already exists' % outfile)
                sys.exit(1)

    # Move any extra files the plugins created to a directory next to the recipe
    if extrafiles:
        if outfile == '-':
            extraoutdir = pn
        else:
            extraoutdir = os.path.join(os.path.dirname(outfile), pn)
        bb.utils.mkdirhier(extraoutdir)
        for destfn, extrafile in extrafiles.items():
            shutil.move(extrafile, os.path.join(extraoutdir, destfn))

    lines = lines_before
    lines_before = []
    skipblank = True
    for line in lines:
        if skipblank:
            skipblank = False
            if not line:
                continue
        if line.startswith('S = '):
            if realpv and pv not in 'git svn hg'.split():
                line = line.replace(realpv, '${PV}')
            if pn:
                line = line.replace(pn, '${BPN}')
            if line == 'S = "${WORKDIR}/${BPN}-${PV}"':
                skipblank = True
                continue
        elif line.startswith('SRC_URI = '):
            if realpv and not pv_srcpv:
                line = line.replace(realpv, '${PV}')
        elif line.startswith('PV = '):
            if realpv:
                # Replace the first part of the PV value
                line = re.sub(r'"[^+]*\+', '"%s+' % realpv, line)
        lines_before.append(line)

    if args.also_native:
        lines = lines_after
        lines_after = []
        bbclassextend = None
        for line in lines:
            if line.startswith('BBCLASSEXTEND ='):
                splitval = line.split('"')
                if len(splitval) > 1:
                    bbclassextend = splitval[1].split()
                    if not 'native' in bbclassextend:
                        bbclassextend.insert(0, 'native')
                        line = 'BBCLASSEXTEND = "%s"' % ' '.join(bbclassextend)
            lines_after.append(line)
        if not bbclassextend:
            lines_after.append('BBCLASSEXTEND = "native"')

    postinst = ("postinst", extravalues.pop('postinst', None))
    postrm = ("postrm", extravalues.pop('postrm', None))
    preinst = ("preinst", extravalues.pop('preinst', None))
    prerm = ("prerm", extravalues.pop('prerm', None))
    funcs = [postinst, postrm, preinst, prerm]
    for func in funcs:
        if func[1]:
            RecipeHandler.genfunction(lines_after, 'pkg_%s_${PN}' % func[0], func[1])

    outlines = []
    outlines.extend(lines_before)
    if classes:
        if outlines[-1] and not outlines[-1].startswith('#'):
            outlines.append('')
        outlines.append('inherit %s' % ' '.join(classes))
        outlines.append('')
    outlines.extend(lines_after)

    outlines = [line.rstrip('\n') + '\n' for line in outlines]

    if extravalues:
        _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)

    if args.extract_to:
        scriptutils.git_convert_standalone_clone(srctree)
        if os.path.isdir(args.extract_to):
            # If the directory exists we'll move the temp dir into it instead of
            # its contents - of course, we could try to always move its contents
            # but that is a pain if there are symlinks; the simplest solution is
            # to just remove it first
            os.rmdir(args.extract_to)
        shutil.move(srctree, args.extract_to)
        if tempsrc == srctree:
            tempsrc = None
        log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)

    if outfile == '-':
        sys.stdout.write(''.join(outlines) + '\n')
    else:
        with open(outfile, 'w') as f:
            lastline = None
            for line in outlines:
                if not lastline and not line:
                    # Skip extra blank lines
                    continue
                f.write('%s' % line)
                lastline = line
        log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
        tinfoil.modified_files()

    if tempsrc:
        if args.keep_temp:
            logger.info('Preserving temporary directory %s' % tempsrc)
        else:
            shutil.rmtree(tempsrc)

    return 0

def check_single_file(fn, fetchuri):
    """Determine if a single downloaded file is something we can't handle"""
    with open(fn, 'r', errors='surrogateescape') as f:
        if '<html' in f.read(100).lower():
            logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri)
            sys.exit(1)

def split_value(value):
    if isinstance(value, str):
        return value.split()
    else:
        return value

def fixup_license(value):
    # Ensure licenses containing OR start and end with parentheses
    if '|' in value:
        return '(' + value + ')'
    return value

def tidy_licenses(value):
    """Flatten, split and sort licenses"""
    from oe.license import flattened_licenses
    def _choose(a, b):
        str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
        return ["(%s | %s)" % (str_a, str_b)]
    if not isinstance(value, str):
        value = " & ".join(value)
    return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)

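# For example (illustrative):
#   fixup_license('MIT | BSD-3-Clause')       -> '(MIT | BSD-3-Clause)'
#   tidy_licenses('GPL-2.0-only & MIT & MIT') -> ['GPL-2.0-only', 'MIT']
# i.e. OR expressions are parenthesised, and the overall expression is
# flattened, de-duplicated and sorted case-insensitively.
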
def handle_license_vars(srctree, lines_before, handled, extravalues, d):
    lichandled = [x for x in handled if x[0] == 'license']
    if lichandled:
        # Someone else has already handled the license vars, just return their value
        return lichandled[0][1]

    licvalues = guess_license(srctree, d)
    licenses = []
    lic_files_chksum = []
    lic_unknown = []
    lines = []
    if licvalues:
        for licvalue in licvalues:
            license = licvalue[0]
            lics = tidy_licenses(fixup_license(license))
            lics = [lic for lic in lics if lic not in licenses]
            if len(lics):
                licenses.extend(lics)
            lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
            if license == 'Unknown':
                lic_unknown.append(licvalue[1])
        if lic_unknown:
            lines.append('#')
            lines.append('# The following license files were not able to be identified and are')
            lines.append('# represented as "Unknown" below, you will need to check them yourself:')
            for licfile in lic_unknown:
                lines.append('# %s' % licfile)

    extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
    if extra_license:
        if licenses == ['Unknown']:
            licenses = extra_license
        else:
            for item in extra_license:
                if item not in licenses:
                    licenses.append(item)
    extra_lic_files_chksum = split_value(extravalues.pop('LIC_FILES_CHKSUM', []))
    for item in extra_lic_files_chksum:
        if item not in lic_files_chksum:
            lic_files_chksum.append(item)

    if lic_files_chksum:
        # We are going to set the vars, so prepend the standard disclaimer
        lines.insert(0, '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is')
        lines.insert(1, '# your responsibility to verify that the values are complete and correct.')
    else:
        # Without LIC_FILES_CHKSUM we set LICENSE = "CLOSED" to allow the
        # user to get started easily
        lines.append('# Unable to find any files that looked like license statements. Check the accompanying')
        lines.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.')
        lines.append('#')
        lines.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if')
        lines.append('# this is not accurate with respect to the licensing of the software being built (it')
        lines.append('# will not be in most cases) you must specify the correct value before using this')
        lines.append('# recipe for anything other than initial testing/development!')
        licenses = ['CLOSED']

    if extra_license and sorted(licenses) != sorted(extra_license):
        lines.append('# NOTE: Original package / source metadata indicates license is: %s' % ' & '.join(extra_license))

    if len(licenses) > 1:
        lines.append('#')
        lines.append('# NOTE: multiple licenses have been detected; they have been separated with &')
        lines.append('# in the LICENSE value for now since it is a reasonable assumption that all')
        lines.append('# of the licenses apply. If instead there is a choice between the multiple')
        lines.append('# licenses then you should change the value to separate the licenses with |')
        lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
        lines.append('# to determine which situation is applicable.')

    lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
    lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
    lines.append('')

    # Replace the placeholder so we get the values in the right place in the recipe file
    try:
        pos = lines_before.index('##LICENSE_PLACEHOLDER##')
    except ValueError:
        pos = -1
    if pos == -1:
        lines_before.extend(lines)
    else:
        lines_before[pos:pos+1] = lines

    handled.append(('license', licvalues))
    return licvalues

def get_license_md5sums(d, static_only=False, linenumbers=False):
    import bb.utils
    import csv
    md5sums = {}
    if not static_only and not linenumbers:
        # Gather md5sums of license files in common license dir
        commonlicdir = d.getVar('COMMON_LICENSE_DIR')
        for fn in os.listdir(commonlicdir):
            md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
            md5sums[md5value] = fn

    # The following were extracted from common values in various recipes
    # (double checking the license against the license file itself, not just
    # the LICENSE value in the recipe)

    # Read license md5sums from csv file
    scripts_path = os.path.dirname(os.path.realpath(__file__))
    for path in (d.getVar('BBPATH').split(':')
                 + [os.path.join(scripts_path, '..', '..')]):
        csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
        if os.path.isfile(csv_path):
            with open(csv_path, newline='') as csv_file:
                fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
                reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
                for row in reader:
                    if linenumbers:
                        md5sums[row['md5sum']] = (
                            row['license'], row['beginline'], row['endline'], row['md5'])
                    else:
                        md5sums[row['md5sum']] = row['license']

    return md5sums

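# Each licenses.csv file read above is expected to contain comma-separated rows
# of the form:
#   <md5sum of license file>,<license>,<beginline>,<endline>,<md5 of extract>
# Only the first two fields are used unless linenumbers=True is passed.
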
def crunch_known_licenses(d):
    '''
    Calculate the MD5 checksums for the crunched versions of all common
    licenses. Also add additional known checksums.
    '''

    crunched_md5sums = {}

    # common licenses
    crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
    crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
    crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'

    # The following two were gleaned from the "forever" npm package
    crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
    # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
    crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
    # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
    crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
    # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
    crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
    # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
    crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
    # unixODBC-2.3.4 COPYING
    crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
    # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
    crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
    # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
    crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'

    # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
    crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
    crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
    crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
    crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
    crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
    # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
    crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
    crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
    crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
    crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
    # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
    crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
    # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
    crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
    # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
    crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
    # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
    crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
    crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
    crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
    crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
    # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
    crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
    # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
    crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'

    commonlicdir = d.getVar('COMMON_LICENSE_DIR')
    for fn in sorted(os.listdir(commonlicdir)):
        md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
        if md5value not in crunched_md5sums:
            crunched_md5sums[md5value] = fn
        elif fn != crunched_md5sums[md5value]:
            bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
        else:
            bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))

    return crunched_md5sums

def crunch_license(licfile):
    '''
    Remove non-material text from a license file and then calculate its
    md5sum. This works well for licenses that contain a copyright statement,
    but is also a useful way to handle people's insistence upon reformatting
    the license text slightly (with no material difference to the text of the
    license).
    '''

    import oe.utils

    # Note: these are carefully constructed!
    license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
    license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
    copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
    disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
    email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
    header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
    tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
    url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')

    lictext = []
    with open(licfile, 'r', errors='surrogateescape') as f:
        for line in f:
            # Drop opening statements
            if copyright_re.match(line):
                continue
            elif disclaimer_re.match(line):
                continue
            elif email_re.match(line):
                continue
            elif header_re.match(line):
                continue
            elif tag_re.match(line):
                continue
            elif url_re.match(line):
                continue
            elif license_title_re.match(line):
                continue
            elif license_statement_re.match(line):
                continue
            # Strip comment symbols
            line = line.replace('*', '') \
                       .replace('#', '')
            # Unify spelling
            line = line.replace('sub-license', 'sublicense')
            # Squash spaces
            line = oe.utils.squashspaces(line.strip())
            # Replace smart quotes, double quotes and backticks with single quotes
            line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
            # Unify brackets
            line = line.replace("{", "[").replace("}", "]")
            if line:
                lictext.append(line)

    m = hashlib.md5()
    try:
        m.update(' '.join(lictext).encode('utf-8'))
        md5val = m.hexdigest()
    except UnicodeEncodeError:
        md5val = None
        lictext = ''
    return md5val, lictext

def guess_license(srctree, d):
    import bb
    md5sums = get_license_md5sums(d)

    crunched_md5sums = crunch_known_licenses(d)

    licenses = []
    licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
    skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
    licfiles = []
    for root, dirs, files in os.walk(srctree):
        for fn in files:
            if fn.endswith(skip_extensions):
                continue
            for spec in licspecs:
                if fnmatch.fnmatch(fn, spec):
                    fullpath = os.path.join(root, fn)
                    if not fullpath in licfiles:
                        licfiles.append(fullpath)
    for licfile in sorted(licfiles):
        md5value = bb.utils.md5_file(licfile)
        license = md5sums.get(md5value, None)
        if not license:
            crunched_md5, lictext = crunch_license(licfile)
            license = crunched_md5sums.get(crunched_md5, None)
            if lictext and not license:
                license = 'Unknown'
                logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
                            "and replace `Unknown` with the license:\n" \
                            "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
        if license:
            licenses.append((license, os.path.relpath(licfile, srctree), md5value))

    # FIXME should we grab at least one source file with a license header and add that too?

    return licenses

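# guess_license() returns a list of (license, relative path, md5sum) tuples,
# e.g. something like [('MIT', 'LICENSE', '<md5 of the file>')], which is what
# handle_license_vars() and split_pkg_licenses() consume.
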
def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
    """
    Given a list of (license, path, md5sum) tuples as returned by guess_license()
    and a dict of package name to path mappings, write out a set of
    package-specific LICENSE values.
    """
    pkglicenses = {pn: []}
    for license, licpath, _ in licvalues:
        license = fixup_license(license)
        for pkgname, pkgpath in packages.items():
            if licpath.startswith(pkgpath + '/'):
                if pkgname in pkglicenses:
                    pkglicenses[pkgname].append(license)
                else:
                    pkglicenses[pkgname] = [license]
                break
        else:
            # Accumulate on the main package
            pkglicenses[pn].append(license)
    outlicenses = {}
    for pkgname in packages:
        # Assume AND operator between license files
        license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
        if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
            license = fallback_licenses[pkgname]
        licenses = tidy_licenses(license)
        license = ' & '.join(licenses)
        outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
        outlicenses[pkgname] = licenses
    return outlicenses

def read_pkgconfig_provides(d):
    pkgdatadir = d.getVar('PKGDATA_DIR')
    pkgmap = {}
    for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')):
        with open(fn, 'r') as f:
            for line in f:
                pkgmap[os.path.basename(line.rstrip())] = os.path.splitext(os.path.basename(fn))[0]
    recipemap = {}
    for pc, pkg in pkgmap.items():
        pkgdatafile = os.path.join(pkgdatadir, 'runtime', pkg)
        if os.path.exists(pkgdatafile):
            with open(pkgdatafile, 'r') as f:
                for line in f:
                    if line.startswith('PN: '):
                        recipemap[pc] = line.split(':', 1)[1].strip()
    return recipemap

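# convert_debian() maps fields from a Debian control file onto recipe variables.
# For example (illustrative), a control file containing:
#   Package: foo
#   Version: 1.0
#   Description: An example utility
# would yield roughly {'PN': 'foo', 'PV': '1.0', 'SUMMARY': 'An example utility'},
# and any postinst/postrm/preinst/prerm scripts are captured for later emission
# as pkg_*_${PN} functions.
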
def convert_debian(debpath):
    value_map = {'Package': 'PN',
                 'Version': 'PV',
                 'Section': 'SECTION',
                 'License': 'LICENSE',
                 'Homepage': 'HOMEPAGE'}

    # FIXME extend this mapping - perhaps use distro_alias.inc?
    depmap = {'libz-dev': 'zlib'}

    values = {}
    depends = []
    with open(os.path.join(debpath, 'control'), 'r', errors='surrogateescape') as f:
        indesc = False
        for line in f:
            if indesc:
                if line.startswith(' '):
                    if line.startswith(' This package contains'):
                        indesc = False
                    else:
                        if 'DESCRIPTION' in values:
                            values['DESCRIPTION'] += ' ' + line.strip()
                        else:
                            values['DESCRIPTION'] = line.strip()
                else:
                    indesc = False
            if not indesc:
                splitline = line.split(':', 1)
                if len(splitline) < 2:
                    continue
                key = splitline[0]
                value = splitline[1].strip()
                if key == 'Build-Depends':
                    for dep in value.split(','):
                        dep = dep.split()[0]
                        mapped = depmap.get(dep, '')
                        if mapped:
                            depends.append(mapped)
                elif key == 'Description':
                    values['SUMMARY'] = value
                    indesc = True
                else:
                    varname = value_map.get(key, None)
                    if varname:
                        values[varname] = value
    postinst = os.path.join(debpath, 'postinst')
    postrm = os.path.join(debpath, 'postrm')
    preinst = os.path.join(debpath, 'preinst')
    prerm = os.path.join(debpath, 'prerm')
    sfiles = [postinst, postrm, preinst, prerm]
    for sfile in sfiles:
        if os.path.isfile(sfile):
            logger.info("Converting %s file to recipe function..." %
                        os.path.basename(sfile).upper())
            content = []
            with open(sfile) as f:
                for line in f:
                    if "#!/" in line:
                        continue
                    line = line.rstrip("\n")
                    if line.strip():
                        content.append(line)
                if content:
                    values[os.path.basename(f.name)] = content

    #if depends:
    #    values['DEPENDS'] = ' '.join(depends)

    return values

def convert_rpm_xml(xmlfile):
    '''Converts the output from rpm -qp --xml to a set of variable values'''
    import xml.etree.ElementTree as ElementTree
    rpmtag_map = {'Name': 'PN',
                  'Version': 'PV',
                  'Summary': 'SUMMARY',
                  'Description': 'DESCRIPTION',
                  'License': 'LICENSE',
                  'Url': 'HOMEPAGE'}

    values = {}
    tree = ElementTree.parse(xmlfile)
    root = tree.getroot()
    for child in root:
        if child.tag == 'rpmTag':
            name = child.attrib.get('name', None)
            if name:
                varname = rpmtag_map.get(name, None)
                if varname:
                    values[varname] = child[0].text
    return values

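# The 'create' subcommand registered below is typically invoked as, for example:
#   recipetool create https://example.com/downloads/foo-1.0.tar.gz -o foo_1.0.bb
# or indirectly via 'devtool add', which passes the hidden --devtool option.
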
def register_commands(subparsers):
    parser_create = subparsers.add_parser('create',
                                          help='Create a new recipe',
                                          description='Creates a new recipe from a source tree')
    parser_create.add_argument('source', help='Path or URL to source')
    parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create')
    parser_create.add_argument('-p', '--provides', help='Specify an alias for the item provided by the recipe')
    parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
    parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s')
    parser_create.add_argument('-N', '--name', help='Name to use within recipe (PN)')
    parser_create.add_argument('-V', '--version', help='Version to use within recipe (PV)')
    parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
    parser_create.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
    parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
    group = parser_create.add_mutually_exclusive_group()
    group.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
    group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
    parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
    parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
    parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
    parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
    parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
    parser_create.set_defaults(func=create_recipe)