1# Recipe creation tool - create command plugin 2# 3# Copyright (C) 2014-2017 Intel Corporation 4# 5# SPDX-License-Identifier: GPL-2.0-only 6# 7 8import sys 9import os 10import argparse 11import glob 12import fnmatch 13import re 14import json 15import logging 16import scriptutils 17from urllib.parse import urlparse, urldefrag, urlsplit 18import hashlib 19import bb.fetch2 20logger = logging.getLogger('recipetool') 21 22tinfoil = None 23plugins = None 24 25def log_error_cond(message, debugonly): 26 if debugonly: 27 logger.debug(message) 28 else: 29 logger.error(message) 30 31def log_info_cond(message, debugonly): 32 if debugonly: 33 logger.debug(message) 34 else: 35 logger.info(message) 36 37def plugin_init(pluginlist): 38 # Take a reference to the list so we can use it later 39 global plugins 40 plugins = pluginlist 41 42def tinfoil_init(instance): 43 global tinfoil 44 tinfoil = instance 45 46class RecipeHandler(object): 47 recipelibmap = {} 48 recipeheadermap = {} 49 recipecmakefilemap = {} 50 recipebinmap = {} 51 52 def __init__(self): 53 self._devtool = False 54 55 @staticmethod 56 def load_libmap(d): 57 '''Load library->recipe mapping''' 58 import oe.package 59 60 if RecipeHandler.recipelibmap: 61 return 62 # First build up library->package mapping 63 d2 = bb.data.createCopy(d) 64 d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}") 65 shlib_providers = oe.package.read_shlib_providers(d2) 66 libdir = d.getVar('libdir') 67 base_libdir = d.getVar('base_libdir') 68 libpaths = list(set([base_libdir, libdir])) 69 libname_re = re.compile(r'^lib(.+)\.so.*$') 70 pkglibmap = {} 71 for lib, item in shlib_providers.items(): 72 for path, pkg in item.items(): 73 if path in libpaths: 74 res = libname_re.match(lib) 75 if res: 76 libname = res.group(1) 77 if not libname in pkglibmap: 78 pkglibmap[libname] = pkg[0] 79 else: 80 logger.debug('unable to extract library name from %s' % lib) 81 82 # Now turn it into a library->recipe mapping 83 pkgdata_dir = d.getVar('PKGDATA_DIR') 84 for libname, pkg in pkglibmap.items(): 85 try: 86 with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f: 87 for line in f: 88 if line.startswith('PN:'): 89 RecipeHandler.recipelibmap[libname] = line.split(':', 1)[-1].strip() 90 break 91 except IOError as ioe: 92 if ioe.errno == 2: 93 logger.warning('unable to find a pkgdata file for package %s' % pkg) 94 else: 95 raise 96 97 # Some overrides - these should be mapped to the virtual 98 RecipeHandler.recipelibmap['GL'] = 'virtual/libgl' 99 RecipeHandler.recipelibmap['EGL'] = 'virtual/egl' 100 RecipeHandler.recipelibmap['GLESv2'] = 'virtual/libgles2' 101 102 @staticmethod 103 def load_devel_filemap(d): 104 '''Build up development file->recipe mapping''' 105 if RecipeHandler.recipeheadermap: 106 return 107 pkgdata_dir = d.getVar('PKGDATA_DIR') 108 includedir = d.getVar('includedir') 109 cmakedir = os.path.join(d.getVar('libdir'), 'cmake') 110 for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')): 111 with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f: 112 pn = None 113 headers = [] 114 cmakefiles = [] 115 for line in f: 116 if line.startswith('PN:'): 117 pn = line.split(':', 1)[-1].strip() 118 elif line.startswith('FILES_INFO:%s:' % pkg): 119 val = line.split(': ', 1)[1].strip() 120 dictval = json.loads(val) 121 for fullpth in sorted(dictval): 122 if fullpth.startswith(includedir) and fullpth.endswith('.h'): 123 headers.append(os.path.relpath(fullpth, includedir)) 124 elif fullpth.startswith(cmakedir) and fullpth.endswith('.cmake'): 125 cmakefiles.append(os.path.relpath(fullpth, cmakedir)) 126 if pn and headers: 127 for header in headers: 128 RecipeHandler.recipeheadermap[header] = pn 129 if pn and cmakefiles: 130 for fn in cmakefiles: 131 RecipeHandler.recipecmakefilemap[fn] = pn 132 133 @staticmethod 134 def load_binmap(d): 135 '''Build up native binary->recipe mapping''' 136 if RecipeHandler.recipebinmap: 137 return 138 sstate_manifests = d.getVar('SSTATE_MANIFESTS') 139 staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE') 140 build_arch = d.getVar('BUILD_ARCH') 141 fileprefix = 'manifest-%s-' % build_arch 142 for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)): 143 with open(fn, 'r') as f: 144 pn = os.path.basename(fn).rsplit('.', 1)[0][len(fileprefix):] 145 for line in f: 146 if line.startswith(staging_bindir_native): 147 prog = os.path.basename(line.rstrip()) 148 RecipeHandler.recipebinmap[prog] = pn 149 150 @staticmethod 151 def checkfiles(path, speclist, recursive=False, excludedirs=None): 152 results = [] 153 if recursive: 154 for root, dirs, files in os.walk(path, topdown=True): 155 if excludedirs: 156 dirs[:] = [d for d in dirs if d not in excludedirs] 157 for fn in files: 158 for spec in speclist: 159 if fnmatch.fnmatch(fn, spec): 160 results.append(os.path.join(root, fn)) 161 else: 162 for spec in speclist: 163 results.extend(glob.glob(os.path.join(path, spec))) 164 return results 165 166 @staticmethod 167 def handle_depends(libdeps, pcdeps, deps, outlines, values, d): 168 if pcdeps: 169 recipemap = read_pkgconfig_provides(d) 170 if libdeps: 171 RecipeHandler.load_libmap(d) 172 173 ignorelibs = ['socket'] 174 ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'musl', 'tar-native', 'binutils-native', 'coreutils-native'] 175 176 unmappedpc = [] 177 pcdeps = list(set(pcdeps)) 178 for pcdep in pcdeps: 179 if isinstance(pcdep, str): 180 recipe = recipemap.get(pcdep, None) 181 if recipe: 182 deps.append(recipe) 183 else: 184 if not pcdep.startswith('$'): 185 unmappedpc.append(pcdep) 186 else: 187 for item in pcdep: 188 recipe = recipemap.get(pcdep, None) 189 if recipe: 190 deps.append(recipe) 191 break 192 else: 193 unmappedpc.append('(%s)' % ' or '.join(pcdep)) 194 195 unmappedlibs = [] 196 for libdep in libdeps: 197 if isinstance(libdep, tuple): 198 lib, header = libdep 199 else: 200 lib = libdep 201 header = None 202 203 if lib in ignorelibs: 204 logger.debug('Ignoring library dependency %s' % lib) 205 continue 206 207 recipe = RecipeHandler.recipelibmap.get(lib, None) 208 if recipe: 209 deps.append(recipe) 210 elif recipe is None: 211 if header: 212 RecipeHandler.load_devel_filemap(d) 213 recipe = RecipeHandler.recipeheadermap.get(header, None) 214 if recipe: 215 deps.append(recipe) 216 elif recipe is None: 217 unmappedlibs.append(lib) 218 else: 219 unmappedlibs.append(lib) 220 221 deps = set(deps).difference(set(ignoredeps)) 222 223 if unmappedpc: 224 outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmappedpc)) 225 outlines.append('# (this is based on recipes that have previously been built and packaged)') 226 227 if unmappedlibs: 228 outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs)))) 229 outlines.append('# (this is based on recipes that have previously been built and packaged)') 230 231 if deps: 232 values['DEPENDS'] = ' '.join(deps) 233 234 @staticmethod 235 def genfunction(outlines, funcname, content, python=False, forcespace=False): 236 if python: 237 prefix = 'python ' 238 else: 239 prefix = '' 240 outlines.append('%s%s () {' % (prefix, funcname)) 241 if python or forcespace: 242 indent = ' ' 243 else: 244 indent = '\t' 245 addnoop = not python 246 for line in content: 247 outlines.append('%s%s' % (indent, line)) 248 if addnoop: 249 strippedline = line.lstrip() 250 if strippedline and not strippedline.startswith('#'): 251 addnoop = False 252 if addnoop: 253 # Without this there'll be a syntax error 254 outlines.append('%s:' % indent) 255 outlines.append('}') 256 outlines.append('') 257 258 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 259 return False 260 261 262def validate_pv(pv): 263 if not pv or '_version' in pv.lower() or pv[0] not in '0123456789': 264 return False 265 return True 266 267def determine_from_filename(srcfile): 268 """Determine name and version from a filename""" 269 if is_package(srcfile): 270 # Force getting the value from the package metadata 271 return None, None 272 273 if '.tar.' in srcfile: 274 namepart = srcfile.split('.tar.')[0] 275 else: 276 namepart = os.path.splitext(srcfile)[0] 277 namepart = namepart.lower().replace('_', '-') 278 if namepart.endswith('.src'): 279 namepart = namepart[:-4] 280 if namepart.endswith('.orig'): 281 namepart = namepart[:-5] 282 splitval = namepart.split('-') 283 logger.debug('determine_from_filename: split name %s into: %s' % (srcfile, splitval)) 284 285 ver_re = re.compile('^v?[0-9]') 286 287 pv = None 288 pn = None 289 if len(splitval) == 1: 290 # Try to split the version out if there is no separator (or a .) 291 res = re.match('^([^0-9]+)([0-9.]+.*)$', namepart) 292 if res: 293 if len(res.group(1)) > 1 and len(res.group(2)) > 1: 294 pn = res.group(1).rstrip('.') 295 pv = res.group(2) 296 else: 297 pn = namepart 298 else: 299 if splitval[-1] in ['source', 'src']: 300 splitval.pop() 301 if len(splitval) > 2 and re.match('^(alpha|beta|stable|release|rc[0-9]|pre[0-9]|p[0-9]|[0-9]{8})', splitval[-1]) and ver_re.match(splitval[-2]): 302 pv = '-'.join(splitval[-2:]) 303 if pv.endswith('-release'): 304 pv = pv[:-8] 305 splitval = splitval[:-2] 306 elif ver_re.match(splitval[-1]): 307 pv = splitval.pop() 308 pn = '-'.join(splitval) 309 if pv and pv.startswith('v'): 310 pv = pv[1:] 311 logger.debug('determine_from_filename: name = "%s" version = "%s"' % (pn, pv)) 312 return (pn, pv) 313 314def determine_from_url(srcuri): 315 """Determine name and version from a URL""" 316 pn = None 317 pv = None 318 parseres = urlparse(srcuri.lower().split(';', 1)[0]) 319 if parseres.path: 320 if 'github.com' in parseres.netloc: 321 res = re.search(r'.*/(.*?)/archive/(.*)-final\.(tar|zip)', parseres.path) 322 if res: 323 pn = res.group(1).strip().replace('_', '-') 324 pv = res.group(2).strip().replace('_', '.') 325 else: 326 res = re.search(r'.*/(.*?)/archive/v?(.*)\.(tar|zip)', parseres.path) 327 if res: 328 pn = res.group(1).strip().replace('_', '-') 329 pv = res.group(2).strip().replace('_', '.') 330 elif 'bitbucket.org' in parseres.netloc: 331 res = re.search(r'.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9a-zA-Z_.]*)\.(tar|zip)', parseres.path) 332 if res: 333 pn = res.group(1).strip().replace('_', '-') 334 pv = res.group(2).strip().replace('_', '.') 335 336 if not pn and not pv: 337 if parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']: 338 srcfile = os.path.basename(parseres.path.rstrip('/')) 339 pn, pv = determine_from_filename(srcfile) 340 elif parseres.scheme in ['git', 'gitsm']: 341 pn = os.path.basename(parseres.path.rstrip('/')).lower().replace('_', '-') 342 if pn.endswith('.git'): 343 pn = pn[:-4] 344 345 logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv)) 346 return (pn, pv) 347 348def supports_srcrev(uri): 349 localdata = bb.data.createCopy(tinfoil.config_data) 350 # This is a bit sad, but if you don't have this set there can be some 351 # odd interactions with the urldata cache which lead to errors 352 localdata.setVar('SRCREV', '${AUTOREV}') 353 try: 354 fetcher = bb.fetch2.Fetch([uri], localdata) 355 urldata = fetcher.ud 356 for u in urldata: 357 if urldata[u].method.supports_srcrev(): 358 return True 359 except bb.fetch2.FetchError as e: 360 logger.debug('FetchError in supports_srcrev: %s' % str(e)) 361 # Fall back to basic check 362 if uri.startswith(('git://', 'gitsm://')): 363 return True 364 return False 365 366def reformat_git_uri(uri): 367 '''Convert any http[s]://....git URI into git://...;protocol=http[s]''' 368 checkuri = uri.split(';', 1)[0] 369 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', checkuri): 370 # Appends scheme if the scheme is missing 371 if not '://' in uri: 372 uri = 'git://' + uri 373 scheme, host, path, user, pswd, parms = bb.fetch2.decodeurl(uri) 374 # Detection mechanism, this is required due to certain URL are formatter with ":" rather than "/" 375 # which causes decodeurl to fail getting the right host and path 376 if len(host.split(':')) > 1: 377 splitslash = host.split(':') 378 # Port number should not be split from host 379 if not re.match('^[0-9]+$', splitslash[1]): 380 host = splitslash[0] 381 path = '/' + splitslash[1] + path 382 #Algorithm: 383 # if user is defined, append protocol=ssh or if a protocol is defined, then honor the user-defined protocol 384 # if no user & password is defined, check for scheme type and append the protocol with the scheme type 385 # finally if protocols or if the url is well-formed, do nothing and rejoin everything back to normal 386 # Need to repackage the arguments for encodeurl, the format is: (scheme, host, path, user, password, OrderedDict([('key', 'value')])) 387 if user: 388 if not 'protocol' in parms: 389 parms.update({('protocol', 'ssh')}) 390 elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms): 391 parms.update({('protocol', scheme)}) 392 # Always append 'git://' 393 fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms)) 394 return fUrl 395 else: 396 return uri 397 398def is_package(url): 399 '''Check if a URL points to a package''' 400 checkurl = url.split(';', 1)[0] 401 if checkurl.endswith(('.deb', '.ipk', '.rpm', '.srpm')): 402 return True 403 return False 404 405def create_recipe(args): 406 import bb.process 407 import tempfile 408 import shutil 409 import oe.recipeutils 410 411 pkgarch = "" 412 if args.machine: 413 pkgarch = "${MACHINE_ARCH}" 414 415 extravalues = {} 416 checksums = {} 417 tempsrc = '' 418 source = args.source 419 srcsubdir = '' 420 srcrev = '${AUTOREV}' 421 srcbranch = '' 422 scheme = '' 423 storeTagName = '' 424 pv_srcpv = False 425 426 if os.path.isfile(source): 427 source = 'file://%s' % os.path.abspath(source) 428 429 if scriptutils.is_src_url(source): 430 # Warn about github archive URLs 431 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): 432 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') 433 # Fetch a URL 434 fetchuri = reformat_git_uri(urldefrag(source)[0]) 435 if args.binary: 436 # Assume the archive contains the directory structure verbatim 437 # so we need to extract to a subdirectory 438 fetchuri += ';subdir=${BPN}' 439 srcuri = fetchuri 440 rev_re = re.compile(';rev=([^;]+)') 441 res = rev_re.search(srcuri) 442 if res: 443 if args.srcrev: 444 logger.error('rev= parameter and -S/--srcrev option cannot both be specified - use one or the other') 445 sys.exit(1) 446 if args.autorev: 447 logger.error('rev= parameter and -a/--autorev option cannot both be specified - use one or the other') 448 sys.exit(1) 449 srcrev = res.group(1) 450 srcuri = rev_re.sub('', srcuri) 451 elif args.srcrev: 452 srcrev = args.srcrev 453 454 # Check whether users provides any branch info in fetchuri. 455 # If true, we will skip all branch checking process to honor all user's input. 456 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(fetchuri) 457 srcbranch = params.get('branch') 458 if args.srcbranch: 459 if srcbranch: 460 logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other') 461 sys.exit(1) 462 srcbranch = args.srcbranch 463 params['branch'] = srcbranch 464 nobranch = params.get('nobranch') 465 if nobranch and srcbranch: 466 logger.error('nobranch= cannot be used if you specify a branch') 467 sys.exit(1) 468 tag = params.get('tag') 469 if not srcbranch and not nobranch and srcrev != '${AUTOREV}': 470 # Append nobranch=1 in the following conditions: 471 # 1. User did not set 'branch=' in srcuri, and 472 # 2. User did not set 'nobranch=1' in srcuri, and 473 # 3. Source revision is not '${AUTOREV}' 474 params['nobranch'] = '1' 475 if tag: 476 # Keep a copy of tag and append nobranch=1 then remove tag from URL. 477 # Bitbake fetcher unable to fetch when {AUTOREV} and tag is set at the same time. 478 storeTagName = params['tag'] 479 params['nobranch'] = '1' 480 del params['tag'] 481 # Assume 'master' branch if not set 482 if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params: 483 params['branch'] = 'master' 484 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) 485 486 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') 487 bb.utils.mkdirhier(tmpparent) 488 tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent) 489 srctree = os.path.join(tempsrc, 'source') 490 491 try: 492 checksums, ftmpdir = scriptutils.fetch_url(tinfoil, fetchuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp) 493 except scriptutils.FetchUrlFailure as e: 494 logger.error(str(e)) 495 sys.exit(1) 496 497 if ftmpdir and args.keep_temp: 498 logger.info('Fetch temp directory is %s' % ftmpdir) 499 500 dirlist = scriptutils.filter_src_subdirs(srctree) 501 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) 502 if len(dirlist) == 1: 503 singleitem = os.path.join(srctree, dirlist[0]) 504 if os.path.isdir(singleitem): 505 # We unpacked a single directory, so we should use that 506 srcsubdir = dirlist[0] 507 srctree = os.path.join(srctree, srcsubdir) 508 else: 509 check_single_file(dirlist[0], fetchuri) 510 elif len(dirlist) == 0: 511 if '/' in fetchuri: 512 fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1]) 513 if os.path.isfile(fn): 514 check_single_file(fn, fetchuri) 515 # If we've got to here then there's no source so we might as well give up 516 logger.error('URL %s resulted in an empty source tree' % fetchuri) 517 sys.exit(1) 518 519 # We need this checking mechanism to improve the recipe created by recipetool and devtool 520 # is able to parse and build by bitbake. 521 # If there is no input for branch name, then check for branch name with SRCREV provided. 522 if not srcbranch and not nobranch and srcrev and (srcrev != '${AUTOREV}') and scheme in ['git', 'gitsm']: 523 try: 524 cmd = 'git branch -r --contains' 525 check_branch, check_branch_err = bb.process.run('%s %s' % (cmd, srcrev), cwd=srctree) 526 except bb.process.ExecutionError as err: 527 logger.error(str(err)) 528 sys.exit(1) 529 get_branch = [x.strip() for x in check_branch.splitlines()] 530 # Remove HEAD reference point and drop remote prefix 531 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] 532 if 'master' in get_branch: 533 # Even with the case where get_branch has multiple objects, if 'master' is one 534 # of them, we should default take from 'master' 535 srcbranch = 'master' 536 elif len(get_branch) == 1: 537 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' 538 srcbranch = get_branch[0] 539 else: 540 # If get_branch contains more than one objects, then display error and exit. 541 mbrch = '\n ' + '\n '.join(get_branch) 542 logger.error('Revision %s was found on multiple branches: %s\nPlease provide the correct branch with -B/--srcbranch' % (srcrev, mbrch)) 543 sys.exit(1) 544 545 # Since we might have a value in srcbranch, we need to 546 # recontruct the srcuri to include 'branch' in params. 547 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) 548 if scheme in ['git', 'gitsm']: 549 params['branch'] = srcbranch or 'master' 550 551 if storeTagName and scheme in ['git', 'gitsm']: 552 # Check srcrev using tag and check validity of the tag 553 cmd = ('git rev-parse --verify %s' % (storeTagName)) 554 try: 555 check_tag, check_tag_err = bb.process.run('%s' % cmd, cwd=srctree) 556 srcrev = check_tag.split()[0] 557 except bb.process.ExecutionError as err: 558 logger.error(str(err)) 559 logger.error("Possibly wrong tag name is provided") 560 sys.exit(1) 561 # Drop tag from srcuri as it will have conflicts with SRCREV during recipe parse. 562 del params['tag'] 563 srcuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) 564 565 if os.path.exists(os.path.join(srctree, '.gitmodules')) and srcuri.startswith('git://'): 566 srcuri = 'gitsm://' + srcuri[6:] 567 logger.info('Fetching submodules...') 568 bb.process.run('git submodule update --init --recursive', cwd=srctree) 569 570 if is_package(fetchuri): 571 localdata = bb.data.createCopy(tinfoil.config_data) 572 pkgfile = bb.fetch2.localpath(fetchuri, localdata) 573 if pkgfile: 574 tmpfdir = tempfile.mkdtemp(prefix='recipetool-') 575 try: 576 if pkgfile.endswith(('.deb', '.ipk')): 577 stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir) 578 stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir) 579 values = convert_debian(tmpfdir) 580 extravalues.update(values) 581 elif pkgfile.endswith(('.rpm', '.srpm')): 582 stdout, _ = bb.process.run('rpm -qp --xml %s > pkginfo.xml' % pkgfile, cwd=tmpfdir) 583 values = convert_rpm_xml(os.path.join(tmpfdir, 'pkginfo.xml')) 584 extravalues.update(values) 585 finally: 586 shutil.rmtree(tmpfdir) 587 else: 588 # Assume we're pointing to an existing source tree 589 if args.extract_to: 590 logger.error('--extract-to cannot be specified if source is a directory') 591 sys.exit(1) 592 if not os.path.isdir(source): 593 logger.error('Invalid source directory %s' % source) 594 sys.exit(1) 595 srctree = source 596 srcuri = '' 597 if os.path.exists(os.path.join(srctree, '.git')): 598 # Try to get upstream repo location from origin remote 599 try: 600 stdout, _ = bb.process.run('git remote -v', cwd=srctree, shell=True) 601 except bb.process.ExecutionError as e: 602 stdout = None 603 if stdout: 604 for line in stdout.splitlines(): 605 splitline = line.split() 606 if len(splitline) > 1: 607 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 608 srcuri = reformat_git_uri(splitline[1]) + ';branch=master' 609 srcsubdir = 'git' 610 break 611 612 if args.src_subdir: 613 srcsubdir = os.path.join(srcsubdir, args.src_subdir) 614 srctree_use = os.path.abspath(os.path.join(srctree, args.src_subdir)) 615 else: 616 srctree_use = os.path.abspath(srctree) 617 618 if args.outfile and os.path.isdir(args.outfile): 619 outfile = None 620 outdir = args.outfile 621 else: 622 outfile = args.outfile 623 outdir = None 624 if outfile and outfile != '-': 625 if os.path.exists(outfile): 626 logger.error('Output file %s already exists' % outfile) 627 sys.exit(1) 628 629 lines_before = [] 630 lines_after = [] 631 632 lines_before.append('# Recipe created by %s' % os.path.basename(sys.argv[0])) 633 lines_before.append('# This is the basis of a recipe and may need further editing in order to be fully functional.') 634 lines_before.append('# (Feel free to remove these comments when editing.)') 635 # We need a blank line here so that patch_recipe_lines can rewind before the LICENSE comments 636 lines_before.append('') 637 638 # We'll come back and replace this later in handle_license_vars() 639 lines_before.append('##LICENSE_PLACEHOLDER##') 640 641 handled = [] 642 classes = [] 643 644 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this 645 pn = None 646 pv = None 647 if outfile: 648 recipefn = os.path.splitext(os.path.basename(outfile))[0] 649 fnsplit = recipefn.split('_') 650 if len(fnsplit) > 1: 651 pn = fnsplit[0] 652 pv = fnsplit[1] 653 else: 654 pn = recipefn 655 656 if args.version: 657 pv = args.version 658 659 if args.name: 660 pn = args.name 661 if args.name.endswith('-native'): 662 if args.also_native: 663 logger.error('--also-native cannot be specified for a recipe named *-native (*-native denotes a recipe that is already only for native) - either remove the -native suffix from the name or drop --also-native') 664 sys.exit(1) 665 classes.append('native') 666 elif args.name.startswith('nativesdk-'): 667 if args.also_native: 668 logger.error('--also-native cannot be specified for a recipe named nativesdk-* (nativesdk-* denotes a recipe that is already only for nativesdk)') 669 sys.exit(1) 670 classes.append('nativesdk') 671 672 if pv and pv not in 'git svn hg'.split(): 673 realpv = pv 674 else: 675 realpv = None 676 677 if not srcuri: 678 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') 679 lines_before.append('SRC_URI = "%s"' % srcuri) 680 for key, value in sorted(checksums.items()): 681 lines_before.append('SRC_URI[%s] = "%s"' % (key, value)) 682 if srcuri and supports_srcrev(srcuri): 683 lines_before.append('') 684 lines_before.append('# Modify these as desired') 685 # Note: we have code to replace realpv further down if it gets set to some other value 686 scheme, _, _, _, _, _ = bb.fetch2.decodeurl(srcuri) 687 if scheme in ['git', 'gitsm']: 688 srcpvprefix = 'git' 689 elif scheme == 'svn': 690 srcpvprefix = 'svnr' 691 else: 692 srcpvprefix = scheme 693 lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix)) 694 pv_srcpv = True 695 if not args.autorev and srcrev == '${AUTOREV}': 696 if os.path.exists(os.path.join(srctree, '.git')): 697 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) 698 srcrev = stdout.rstrip() 699 lines_before.append('SRCREV = "%s"' % srcrev) 700 if args.provides: 701 lines_before.append('PROVIDES = "%s"' % args.provides) 702 lines_before.append('') 703 704 if srcsubdir and not args.binary: 705 # (for binary packages we explicitly specify subdir= when fetching to 706 # match the default value of S, so we don't need to set it in that case) 707 lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) 708 lines_before.append('') 709 710 if pkgarch: 711 lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch) 712 lines_after.append('') 713 714 if args.binary: 715 lines_after.append('INSANE_SKIP:${PN} += "already-stripped"') 716 lines_after.append('') 717 718 if args.npm_dev: 719 extravalues['NPM_INSTALL_DEV'] = 1 720 721 # Find all plugins that want to register handlers 722 logger.debug('Loading recipe handlers') 723 raw_handlers = [] 724 for plugin in plugins: 725 if hasattr(plugin, 'register_recipe_handlers'): 726 plugin.register_recipe_handlers(raw_handlers) 727 # Sort handlers by priority 728 handlers = [] 729 for i, handler in enumerate(raw_handlers): 730 if isinstance(handler, tuple): 731 handlers.append((handler[0], handler[1], i)) 732 else: 733 handlers.append((handler, 0, i)) 734 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True) 735 for handler, priority, _ in handlers: 736 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority)) 737 setattr(handler, '_devtool', args.devtool) 738 handlers = [item[0] for item in handlers] 739 740 # Apply the handlers 741 if args.binary: 742 classes.append('bin_package') 743 handled.append('buildsystem') 744 745 for handler in handlers: 746 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) 747 748 # native and nativesdk classes are special and must be inherited last 749 # If present, put them at the end of the classes list 750 classes.sort(key=lambda c: c in ("native", "nativesdk")) 751 752 extrafiles = extravalues.pop('extrafiles', {}) 753 extra_pn = extravalues.pop('PN', None) 754 extra_pv = extravalues.pop('PV', None) 755 756 if extra_pv and not realpv: 757 realpv = extra_pv 758 if not validate_pv(realpv): 759 realpv = None 760 else: 761 realpv = realpv.lower().split()[0] 762 if '_' in realpv: 763 realpv = realpv.replace('_', '-') 764 if extra_pn and not pn: 765 pn = extra_pn 766 if pn.startswith('GNU '): 767 pn = pn[4:] 768 if ' ' in pn: 769 # Probably a descriptive identifier rather than a proper name 770 pn = None 771 else: 772 pn = pn.lower() 773 if '_' in pn: 774 pn = pn.replace('_', '-') 775 776 if srcuri and not realpv or not pn: 777 name_pn, name_pv = determine_from_url(srcuri) 778 if name_pn and not pn: 779 pn = name_pn 780 if name_pv and not realpv: 781 realpv = name_pv 782 783 licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data) 784 785 if not outfile: 786 if not pn: 787 log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool) 788 # devtool looks for this specific exit code, so don't change it 789 sys.exit(15) 790 else: 791 if srcuri and srcuri.startswith(('gitsm://', 'git://', 'hg://', 'svn://')): 792 suffix = srcuri.split(':', 1)[0] 793 if suffix == 'gitsm': 794 suffix = 'git' 795 outfile = '%s_%s.bb' % (pn, suffix) 796 elif realpv: 797 outfile = '%s_%s.bb' % (pn, realpv) 798 else: 799 outfile = '%s.bb' % pn 800 if outdir: 801 outfile = os.path.join(outdir, outfile) 802 # We need to check this again 803 if os.path.exists(outfile): 804 logger.error('Output file %s already exists' % outfile) 805 sys.exit(1) 806 807 # Move any extra files the plugins created to a directory next to the recipe 808 if extrafiles: 809 if outfile == '-': 810 extraoutdir = pn 811 else: 812 extraoutdir = os.path.join(os.path.dirname(outfile), pn) 813 bb.utils.mkdirhier(extraoutdir) 814 for destfn, extrafile in extrafiles.items(): 815 shutil.move(extrafile, os.path.join(extraoutdir, destfn)) 816 817 lines = lines_before 818 lines_before = [] 819 skipblank = True 820 for line in lines: 821 if skipblank: 822 skipblank = False 823 if not line: 824 continue 825 if line.startswith('S = '): 826 if realpv and pv not in 'git svn hg'.split(): 827 line = line.replace(realpv, '${PV}') 828 if pn: 829 line = line.replace(pn, '${BPN}') 830 if line == 'S = "${WORKDIR}/${BPN}-${PV}"': 831 skipblank = True 832 continue 833 elif line.startswith('SRC_URI = '): 834 if realpv and not pv_srcpv: 835 line = line.replace(realpv, '${PV}') 836 elif line.startswith('PV = '): 837 if realpv: 838 # Replace the first part of the PV value 839 line = re.sub(r'"[^+]*\+', '"%s+' % realpv, line) 840 lines_before.append(line) 841 842 if args.also_native: 843 lines = lines_after 844 lines_after = [] 845 bbclassextend = None 846 for line in lines: 847 if line.startswith('BBCLASSEXTEND ='): 848 splitval = line.split('"') 849 if len(splitval) > 1: 850 bbclassextend = splitval[1].split() 851 if not 'native' in bbclassextend: 852 bbclassextend.insert(0, 'native') 853 line = 'BBCLASSEXTEND = "%s"' % ' '.join(bbclassextend) 854 lines_after.append(line) 855 if not bbclassextend: 856 lines_after.append('BBCLASSEXTEND = "native"') 857 858 postinst = ("postinst", extravalues.pop('postinst', None)) 859 postrm = ("postrm", extravalues.pop('postrm', None)) 860 preinst = ("preinst", extravalues.pop('preinst', None)) 861 prerm = ("prerm", extravalues.pop('prerm', None)) 862 funcs = [postinst, postrm, preinst, prerm] 863 for func in funcs: 864 if func[1]: 865 RecipeHandler.genfunction(lines_after, 'pkg_%s_${PN}' % func[0], func[1]) 866 867 outlines = [] 868 outlines.extend(lines_before) 869 if classes: 870 if outlines[-1] and not outlines[-1].startswith('#'): 871 outlines.append('') 872 outlines.append('inherit %s' % ' '.join(classes)) 873 outlines.append('') 874 outlines.extend(lines_after) 875 876 if extravalues: 877 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False) 878 879 if args.extract_to: 880 scriptutils.git_convert_standalone_clone(srctree) 881 if os.path.isdir(args.extract_to): 882 # If the directory exists we'll move the temp dir into it instead of 883 # its contents - of course, we could try to always move its contents 884 # but that is a pain if there are symlinks; the simplest solution is 885 # to just remove it first 886 os.rmdir(args.extract_to) 887 shutil.move(srctree, args.extract_to) 888 if tempsrc == srctree: 889 tempsrc = None 890 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) 891 892 if outfile == '-': 893 sys.stdout.write('\n'.join(outlines) + '\n') 894 else: 895 with open(outfile, 'w') as f: 896 lastline = None 897 for line in outlines: 898 if not lastline and not line: 899 # Skip extra blank lines 900 continue 901 f.write('%s\n' % line) 902 lastline = line 903 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 904 tinfoil.modified_files() 905 906 if tempsrc: 907 if args.keep_temp: 908 logger.info('Preserving temporary directory %s' % tempsrc) 909 else: 910 shutil.rmtree(tempsrc) 911 912 return 0 913 914def check_single_file(fn, fetchuri): 915 """Determine if a single downloaded file is something we can't handle""" 916 with open(fn, 'r', errors='surrogateescape') as f: 917 if '<html' in f.read(100).lower(): 918 logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri) 919 sys.exit(1) 920 921def split_value(value): 922 if isinstance(value, str): 923 return value.split() 924 else: 925 return value 926 927def fixup_license(value): 928 # Ensure licenses with OR starts and ends with brackets 929 if '|' in value: 930 return '(' + value + ')' 931 return value 932 933def tidy_licenses(value): 934 """Flat, split and sort licenses""" 935 from oe.license import flattened_licenses 936 def _choose(a, b): 937 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) 938 return ["(%s | %s)" % (str_a, str_b)] 939 if not isinstance(value, str): 940 value = " & ".join(value) 941 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) 942 943def handle_license_vars(srctree, lines_before, handled, extravalues, d): 944 lichandled = [x for x in handled if x[0] == 'license'] 945 if lichandled: 946 # Someone else has already handled the license vars, just return their value 947 return lichandled[0][1] 948 949 licvalues = guess_license(srctree, d) 950 licenses = [] 951 lic_files_chksum = [] 952 lic_unknown = [] 953 lines = [] 954 if licvalues: 955 for licvalue in licvalues: 956 license = licvalue[0] 957 lics = tidy_licenses(fixup_license(license)) 958 lics = [lic for lic in lics if lic not in licenses] 959 if len(lics): 960 licenses.extend(lics) 961 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) 962 if license == 'Unknown': 963 lic_unknown.append(licvalue[1]) 964 if lic_unknown: 965 lines.append('#') 966 lines.append('# The following license files were not able to be identified and are') 967 lines.append('# represented as "Unknown" below, you will need to check them yourself:') 968 for licfile in lic_unknown: 969 lines.append('# %s' % licfile) 970 971 extra_license = tidy_licenses(extravalues.pop('LICENSE', '')) 972 if extra_license: 973 if licenses == ['Unknown']: 974 licenses = extra_license 975 else: 976 for item in extra_license: 977 if item not in licenses: 978 licenses.append(item) 979 extra_lic_files_chksum = split_value(extravalues.pop('LIC_FILES_CHKSUM', [])) 980 for item in extra_lic_files_chksum: 981 if item not in lic_files_chksum: 982 lic_files_chksum.append(item) 983 984 if lic_files_chksum: 985 # We are going to set the vars, so prepend the standard disclaimer 986 lines.insert(0, '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is') 987 lines.insert(1, '# your responsibility to verify that the values are complete and correct.') 988 else: 989 # Without LIC_FILES_CHKSUM we set LICENSE = "CLOSED" to allow the 990 # user to get started easily 991 lines.append('# Unable to find any files that looked like license statements. Check the accompanying') 992 lines.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.') 993 lines.append('#') 994 lines.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if') 995 lines.append('# this is not accurate with respect to the licensing of the software being built (it') 996 lines.append('# will not be in most cases) you must specify the correct value before using this') 997 lines.append('# recipe for anything other than initial testing/development!') 998 licenses = ['CLOSED'] 999 1000 if extra_license and sorted(licenses) != sorted(extra_license): 1001 lines.append('# NOTE: Original package / source metadata indicates license is: %s' % ' & '.join(extra_license)) 1002 1003 if len(licenses) > 1: 1004 lines.append('#') 1005 lines.append('# NOTE: multiple licenses have been detected; they have been separated with &') 1006 lines.append('# in the LICENSE value for now since it is a reasonable assumption that all') 1007 lines.append('# of the licenses apply. If instead there is a choice between the multiple') 1008 lines.append('# licenses then you should change the value to separate the licenses with |') 1009 lines.append('# instead of &. If there is any doubt, check the accompanying documentation') 1010 lines.append('# to determine which situation is applicable.') 1011 1012 lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold))) 1013 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) 1014 lines.append('') 1015 1016 # Replace the placeholder so we get the values in the right place in the recipe file 1017 try: 1018 pos = lines_before.index('##LICENSE_PLACEHOLDER##') 1019 except ValueError: 1020 pos = -1 1021 if pos == -1: 1022 lines_before.extend(lines) 1023 else: 1024 lines_before[pos:pos+1] = lines 1025 1026 handled.append(('license', licvalues)) 1027 return licvalues 1028 1029def get_license_md5sums(d, static_only=False, linenumbers=False): 1030 import bb.utils 1031 import csv 1032 md5sums = {} 1033 if not static_only and not linenumbers: 1034 # Gather md5sums of license files in common license dir 1035 commonlicdir = d.getVar('COMMON_LICENSE_DIR') 1036 for fn in os.listdir(commonlicdir): 1037 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) 1038 md5sums[md5value] = fn 1039 1040 # The following were extracted from common values in various recipes 1041 # (double checking the license against the license file itself, not just 1042 # the LICENSE value in the recipe) 1043 1044 # Read license md5sums from csv file 1045 scripts_path = os.path.dirname(os.path.realpath(__file__)) 1046 for path in (d.getVar('BBPATH').split(':') 1047 + [os.path.join(scripts_path, '..', '..')]): 1048 csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv') 1049 if os.path.isfile(csv_path): 1050 with open(csv_path, newline='') as csv_file: 1051 fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5'] 1052 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames) 1053 for row in reader: 1054 if linenumbers: 1055 md5sums[row['md5sum']] = ( 1056 row['license'], row['beginline'], row['endline'], row['md5']) 1057 else: 1058 md5sums[row['md5sum']] = row['license'] 1059 1060 return md5sums 1061 1062def crunch_license(licfile): 1063 ''' 1064 Remove non-material text from a license file and then check 1065 its md5sum against a known list. This works well for licenses 1066 which contain a copyright statement, but is also a useful way 1067 to handle people's insistence upon reformatting the license text 1068 slightly (with no material difference to the text of the 1069 license). 1070 ''' 1071 1072 import oe.utils 1073 1074 # Note: these are carefully constructed! 1075 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') 1076 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') 1077 copyright_re = re.compile('^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') 1078 disclaimer_re = re.compile('^ *\*? ?All [Rr]ights [Rr]eserved\.$') 1079 email_re = re.compile('^.*<[\w\.-]*@[\w\.\-]*>$') 1080 header_re = re.compile('^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') 1081 tag_re = re.compile('^ *@?\(?([Ll]icense|MIT)\)?$') 1082 url_re = re.compile('^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') 1083 1084 crunched_md5sums = {} 1085 1086 # common licenses 1087 crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0' 1088 crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = '0BSD' 1089 crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause' 1090 crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause' 1091 crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0' 1092 crunched_md5sums['975742a59ae1b8abdea63a97121f49f4'] = 'EDL-1.0' 1093 crunched_md5sums['5322cee4433d84fb3aafc9e253116447'] = 'EPL-1.0' 1094 crunched_md5sums['6922352e87de080f42419bed93063754'] = 'EPL-2.0' 1095 crunched_md5sums['793475baa22295cae1d3d4046a3a0ceb'] = 'GPL-2.0-only' 1096 crunched_md5sums['ff9047f969b02c20f0559470df5cb433'] = 'GPL-2.0-or-later' 1097 crunched_md5sums['ea6de5453fcadf534df246e6cdafadcd'] = 'GPL-3.0-only' 1098 crunched_md5sums['b419257d4d153a6fde92ddf96acf5b67'] = 'GPL-3.0-or-later' 1099 crunched_md5sums['228737f4c49d3ee75b8fb3706b090b84'] = 'ISC' 1100 crunched_md5sums['c6a782e826ca4e85bf7f8b89435a677d'] = 'LGPL-2.0-only' 1101 crunched_md5sums['32d8f758a066752f0db09bd7624b8090'] = 'LGPL-2.0-or-later' 1102 crunched_md5sums['4820937eb198b4f84c52217ed230be33'] = 'LGPL-2.1-only' 1103 crunched_md5sums['db13fe9f3a13af7adab2dc7a76f9e44a'] = 'LGPL-2.1-or-later' 1104 crunched_md5sums['d7a0f2e4e0950e837ac3eabf5bd1d246'] = 'LGPL-3.0-only' 1105 crunched_md5sums['abbf328e2b434f9153351f06b9f79d02'] = 'LGPL-3.0-or-later' 1106 crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT' 1107 crunched_md5sums['b218b0e94290b9b818c4be67c8e1cc82'] = 'MIT-0' 1108 crunched_md5sums['ddc18131d6748374f0f35a621c245b49'] = 'Unlicense' 1109 crunched_md5sums['51f9570ff32571fc0a443102285c5e33'] = 'WTFPL' 1110 1111 # The following two were gleaned from the "forever" npm package 1112 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC' 1113 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt 1114 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause' 1115 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE 1116 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only' 1117 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt 1118 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only' 1119 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 1120 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only' 1121 # unixODBC-2.3.4 COPYING 1122 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only' 1123 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 1124 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only' 1125 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 1126 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' 1127 1128 # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD 1129 crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause' 1130 # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE 1131 crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause' 1132 # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE 1133 crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause' 1134 # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE 1135 crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause' 1136 # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE 1137 crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause' 1138 # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE 1139 crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause' 1140 # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE 1141 crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause' 1142 # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE 1143 crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause' 1144 # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE 1145 crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause' 1146 # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE 1147 crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT' 1148 # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE 1149 crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT' 1150 # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE 1151 crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0' 1152 # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md 1153 crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0' 1154 # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE 1155 crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0' 1156 # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt 1157 crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0' 1158 # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE 1159 crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0' 1160 # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE 1161 crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense' 1162 # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md 1163 crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib' 1164 1165 lictext = [] 1166 with open(licfile, 'r', errors='surrogateescape') as f: 1167 for line in f: 1168 # Drop opening statements 1169 if copyright_re.match(line): 1170 continue 1171 elif disclaimer_re.match(line): 1172 continue 1173 elif email_re.match(line): 1174 continue 1175 elif header_re.match(line): 1176 continue 1177 elif tag_re.match(line): 1178 continue 1179 elif url_re.match(line): 1180 continue 1181 elif license_title_re.match(line): 1182 continue 1183 elif license_statement_re.match(line): 1184 continue 1185 # Strip comment symbols 1186 line = line.replace('*', '') \ 1187 .replace('#', '') 1188 # Unify spelling 1189 line = line.replace('sub-license', 'sublicense') 1190 # Squash spaces 1191 line = oe.utils.squashspaces(line.strip()) 1192 # Replace smart quotes, double quotes and backticks with single quotes 1193 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') 1194 # Unify brackets 1195 line = line.replace("{", "[").replace("}", "]") 1196 if line: 1197 lictext.append(line) 1198 1199 m = hashlib.md5() 1200 try: 1201 m.update(' '.join(lictext).encode('utf-8')) 1202 md5val = m.hexdigest() 1203 except UnicodeEncodeError: 1204 md5val = None 1205 lictext = '' 1206 license = crunched_md5sums.get(md5val, None) 1207 return license, md5val, lictext 1208 1209def guess_license(srctree, d): 1210 import bb 1211 md5sums = get_license_md5sums(d) 1212 1213 licenses = [] 1214 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] 1215 skip_extensions = (".html", ".js", ".json", ".svg", ".ts") 1216 licfiles = [] 1217 for root, dirs, files in os.walk(srctree): 1218 for fn in files: 1219 if fn.endswith(skip_extensions): 1220 continue 1221 for spec in licspecs: 1222 if fnmatch.fnmatch(fn, spec): 1223 fullpath = os.path.join(root, fn) 1224 if not fullpath in licfiles: 1225 licfiles.append(fullpath) 1226 for licfile in sorted(licfiles): 1227 md5value = bb.utils.md5_file(licfile) 1228 license = md5sums.get(md5value, None) 1229 if not license: 1230 license, crunched_md5, lictext = crunch_license(licfile) 1231 if lictext and not license: 1232 license = 'Unknown' 1233 logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \ 1234 "and replace `Unknown` with the license:\n" \ 1235 "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value)) 1236 if license: 1237 licenses.append((license, os.path.relpath(licfile, srctree), md5value)) 1238 1239 # FIXME should we grab at least one source file with a license header and add that too? 1240 1241 return licenses 1242 1243def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): 1244 """ 1245 Given a list of (license, path, md5sum) as returned by guess_license(), 1246 a dict of package name to path mappings, write out a set of 1247 package-specific LICENSE values. 1248 """ 1249 pkglicenses = {pn: []} 1250 for license, licpath, _ in licvalues: 1251 license = fixup_license(license) 1252 for pkgname, pkgpath in packages.items(): 1253 if licpath.startswith(pkgpath + '/'): 1254 if pkgname in pkglicenses: 1255 pkglicenses[pkgname].append(license) 1256 else: 1257 pkglicenses[pkgname] = [license] 1258 break 1259 else: 1260 # Accumulate on the main package 1261 pkglicenses[pn].append(license) 1262 outlicenses = {} 1263 for pkgname in packages: 1264 # Assume AND operator between license files 1265 license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' 1266 if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses: 1267 license = fallback_licenses[pkgname] 1268 licenses = tidy_licenses(license) 1269 license = ' & '.join(licenses) 1270 outlines.append('LICENSE:%s = "%s"' % (pkgname, license)) 1271 outlicenses[pkgname] = licenses 1272 return outlicenses 1273 1274def read_pkgconfig_provides(d): 1275 pkgdatadir = d.getVar('PKGDATA_DIR') 1276 pkgmap = {} 1277 for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')): 1278 with open(fn, 'r') as f: 1279 for line in f: 1280 pkgmap[os.path.basename(line.rstrip())] = os.path.splitext(os.path.basename(fn))[0] 1281 recipemap = {} 1282 for pc, pkg in pkgmap.items(): 1283 pkgdatafile = os.path.join(pkgdatadir, 'runtime', pkg) 1284 if os.path.exists(pkgdatafile): 1285 with open(pkgdatafile, 'r') as f: 1286 for line in f: 1287 if line.startswith('PN: '): 1288 recipemap[pc] = line.split(':', 1)[1].strip() 1289 return recipemap 1290 1291def convert_debian(debpath): 1292 value_map = {'Package': 'PN', 1293 'Version': 'PV', 1294 'Section': 'SECTION', 1295 'License': 'LICENSE', 1296 'Homepage': 'HOMEPAGE'} 1297 1298 # FIXME extend this mapping - perhaps use distro_alias.inc? 1299 depmap = {'libz-dev': 'zlib'} 1300 1301 values = {} 1302 depends = [] 1303 with open(os.path.join(debpath, 'control'), 'r', errors='surrogateescape') as f: 1304 indesc = False 1305 for line in f: 1306 if indesc: 1307 if line.startswith(' '): 1308 if line.startswith(' This package contains'): 1309 indesc = False 1310 else: 1311 if 'DESCRIPTION' in values: 1312 values['DESCRIPTION'] += ' ' + line.strip() 1313 else: 1314 values['DESCRIPTION'] = line.strip() 1315 else: 1316 indesc = False 1317 if not indesc: 1318 splitline = line.split(':', 1) 1319 if len(splitline) < 2: 1320 continue 1321 key = splitline[0] 1322 value = splitline[1].strip() 1323 if key == 'Build-Depends': 1324 for dep in value.split(','): 1325 dep = dep.split()[0] 1326 mapped = depmap.get(dep, '') 1327 if mapped: 1328 depends.append(mapped) 1329 elif key == 'Description': 1330 values['SUMMARY'] = value 1331 indesc = True 1332 else: 1333 varname = value_map.get(key, None) 1334 if varname: 1335 values[varname] = value 1336 postinst = os.path.join(debpath, 'postinst') 1337 postrm = os.path.join(debpath, 'postrm') 1338 preinst = os.path.join(debpath, 'preinst') 1339 prerm = os.path.join(debpath, 'prerm') 1340 sfiles = [postinst, postrm, preinst, prerm] 1341 for sfile in sfiles: 1342 if os.path.isfile(sfile): 1343 logger.info("Converting %s file to recipe function..." % 1344 os.path.basename(sfile).upper()) 1345 content = [] 1346 with open(sfile) as f: 1347 for line in f: 1348 if "#!/" in line: 1349 continue 1350 line = line.rstrip("\n") 1351 if line.strip(): 1352 content.append(line) 1353 if content: 1354 values[os.path.basename(f.name)] = content 1355 1356 #if depends: 1357 # values['DEPENDS'] = ' '.join(depends) 1358 1359 return values 1360 1361def convert_rpm_xml(xmlfile): 1362 '''Converts the output from rpm -qp --xml to a set of variable values''' 1363 import xml.etree.ElementTree as ElementTree 1364 rpmtag_map = {'Name': 'PN', 1365 'Version': 'PV', 1366 'Summary': 'SUMMARY', 1367 'Description': 'DESCRIPTION', 1368 'License': 'LICENSE', 1369 'Url': 'HOMEPAGE'} 1370 1371 values = {} 1372 tree = ElementTree.parse(xmlfile) 1373 root = tree.getroot() 1374 for child in root: 1375 if child.tag == 'rpmTag': 1376 name = child.attrib.get('name', None) 1377 if name: 1378 varname = rpmtag_map.get(name, None) 1379 if varname: 1380 values[varname] = child[0].text 1381 return values 1382 1383 1384def register_commands(subparsers): 1385 parser_create = subparsers.add_parser('create', 1386 help='Create a new recipe', 1387 description='Creates a new recipe from a source tree') 1388 parser_create.add_argument('source', help='Path or URL to source') 1389 parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create') 1390 parser_create.add_argument('-p', '--provides', help='Specify an alias for the item provided by the recipe') 1391 parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true') 1392 parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s') 1393 parser_create.add_argument('-N', '--name', help='Name to use within recipe (PN)') 1394 parser_create.add_argument('-V', '--version', help='Version to use within recipe (PV)') 1395 parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true') 1396 parser_create.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true') 1397 parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR') 1398 group = parser_create.add_mutually_exclusive_group() 1399 group.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true") 1400 group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)') 1401 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)') 1402 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') 1403 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies') 1404 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) 1405 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') 1406 parser_create.set_defaults(func=create_recipe) 1407 1408