#
# BitBake ToasterUI Implementation
#
# Copyright (C) 2013        Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys
import bb
import re
import os

import django
from django.utils import timezone

import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

# Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] =\
    os.environ.get("DJANGO_SETTINGS_MODULE",
                   "toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
django.setup()

from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
from orm.models import Variable, VariableHistory
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
from orm.models import Task_Dependency, Package_Dependency
from orm.models import Recipe_Dependency, Provides
from orm.models import Project, CustomImagePackage
from orm.models import signal_runbuilds

from bldcontrol.models import BuildEnvironment, BuildRequest
from bldcontrol.models import BRLayer
from bldcontrol import bbcontroller

from bb.msg import BBLogFormatter as formatter
from django.db import models
from pprint import pformat
import logging
from datetime import datetime, timedelta

from django.db import transaction


# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")

class NotExisting(Exception):
    pass

class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        self.layer_version_objects = []
        self.layer_version_built = []
        self.task_objects = {}
        self.recipe_objects = {}

    @staticmethod
    def _build_key(**kwargs):
        key = "0"
        for k in sorted(kwargs.keys()):
            if isinstance(kwargs[k], models.Model):
                key += "-%d" % kwargs[k].id
            else:
                key += "-%s" % str(kwargs[k])
        return key


    def _cached_get_or_create(self, clazz, **kwargs):
        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
            database through any other means.
        """

        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__
        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        created = False
        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key], created = \
                clazz.objects.get_or_create(**kwargs)

        return (vars(self)[dictname][key], created)


    def _cached_get(self, clazz, **kwargs):
        """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
        """
        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__

        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key] = clazz.objects.get(**kwargs)

        return vars(self)[dictname][key]
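
    # Illustrative sketch of the cache-key scheme used above (not executed;
    # the Build/Recipe ids are hypothetical). Model instances are keyed by
    # their primary key, everything else by its string form, so a lookup
    # such as
    #
    #   self._cached_get(Task, build=build, recipe=recipe,
    #                    task_name="do_compile")
    #
    # stores the Task in vars(self)["objects_Task"] under a key like
    # "0-17-42-do_compile" (kwargs sorted alphabetically: build, recipe,
    # task_name). Repeated lookups with the same kwargs then skip the
    # database entirely.
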
    def get_similar_target_with_image_files(self, target):
        """
        Get a Target object "similar" to target; i.e. with the same target
        name ('core-image-minimal' etc.) and machine.
        """
        return target.get_similar_target_with_image_files()

    def get_similar_target_with_sdk_files(self, target):
        return target.get_similar_target_with_sdk_files()

    def clone_image_artifacts(self, target_from, target_to):
        target_to.clone_image_artifacts_from(target_from)

    def clone_sdk_artifacts(self, target_from, target_to):
        target_to.clone_sdk_artifacts_from(target_from)

    def _timestamp_to_datetime(self, secs):
        """
        Convert timestamp in seconds to Python datetime
        """
        return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))

    # pylint: disable=no-self-use
    # we disable detection of no self use in functions because the methods actually work on the object
    # even if they don't touch self anywhere

    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def get_or_create_build_object(self, brbe):
        prj = None
        buildrequest = None
        if brbe is not None:
            # Toaster-triggered build
            logger.debug("buildinfohelper: brbe is %s" % brbe)
            br, _ = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk=br)
            prj = buildrequest.project
        else:
            # CLI build
            prj = Project.objects.get_or_create_default_project()
            logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj)

        if buildrequest is not None:
            # reuse existing Build object
            build = buildrequest.build
            build.project = prj
            build.save()
        else:
            # create new Build object
            now = timezone.now()
            build = Build.objects.create(
                project=prj,
                started_on=now,
                completed_on=now,
                build_name='')

            logger.debug("buildinfohelper: build is created %s" % build)

        if buildrequest is not None:
            buildrequest.build = build
            buildrequest.save()

        return build

    def update_build(self, build, data_dict):
        for key in data_dict:
            setattr(build, key, data_dict[key])
        build.save()

    @staticmethod
    def get_or_create_targets(target_info):
        """
        NB get_or_create() is used here because for Toaster-triggered builds,
        we already created the targets when the build was triggered.
        """
        result = []
        for target in target_info['targets']:
            task = ''
            if ':' in target:
                target, task = target.split(':', 1)
            if task.startswith('do_'):
                task = task[3:]
            if task == 'build':
                task = ''

            obj, _ = Target.objects.get_or_create(build=target_info['build'],
                                                  target=target,
                                                  task=task)
            result.append(obj)
        return result
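
    # A quick sketch of how the target strings above are normalised (the
    # inputs are hypothetical; nothing here is executed):
    #
    #   "core-image-minimal"                 -> target="core-image-minimal", task=""
    #   "core-image-minimal:do_populate_sdk" -> target="core-image-minimal", task="populate_sdk"
    #   "core-image-minimal:do_build"        -> target="core-image-minimal", task=""
    #
    # i.e. the "do_" prefix is dropped and the default "build" task is
    # stored as the empty string.
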
    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
        assert isinstance(build, Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        if build.outcome == Build.CANCELLED:
            return
        try:
            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
                return
        except AttributeError:
            # We may not have a buildrequest if this is a command line build
            pass

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()

        # We force a sync point here to force the outcome status commit,
        # which resolves a race condition with the build completion takedown
        transaction.set_autocommit(True)
        transaction.set_autocommit(False)

        signal_runbuilds()

    def update_target_set_license_manifest(self, target, license_manifest_path):
        target.license_manifest_path = license_manifest_path
        target.save()

    def update_target_set_package_manifest(self, target, package_manifest_path):
        target.package_manifest_path = package_manifest_path
        target.save()

    def update_task_object(self, build, task_name, recipe_name, task_stats):
        """
        Find the task for build which matches the recipe and task name
        to be stored
        """
        task_to_update = Task.objects.get(
            build = build,
            task_name = task_name,
            recipe__name = recipe_name
        )

        if 'started' in task_stats and 'ended' in task_stats:
            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
            task_to_update.disk_io_read = task_stats['disk_io_read']
            task_to_update.disk_io_write = task_stats['disk_io_write']
            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

        task_to_update.save()

    def get_update_task_object(self, task_information, must_exist = False):
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name']
                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        if object_changed:
            task_object.save()
        return task_object
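
    # Sketch of the setscene bookkeeping above (hypothetical case, not
    # executed): a task reported as OUTCOME_COVERED that has exactly one
    # "<task>_setscene" sibling is re-classified as OUTCOME_CACHED, and its
    # sstate_result is derived from the setscene task's outcome:
    #
    #   setscene OUTCOME_SUCCESS -> sstate_result = SSTATE_RESTORED
    #   setscene OUTCOME_FAILED  -> sstate_result = SSTATE_FAILED
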
    def get_update_recipe_object(self, recipe_information, must_exist = False):
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/")     # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                        layer_version=built_layer,
                        file_path=recipe_information['file_path'],
                        pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if recipe_information['layer_version'].build is not None and \
           recipe_information['layer_version'].build.project == \
               Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        if isinstance(layer_obj, Layer_Version):
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.debug("Created new layer version %s for build history",
                         layer_copy.layer.name)

            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                  build = build_obj,
                                  layer = layer_obj,
                                  branch = layer_version_information['branch'],
                                  commit = layer_version_information['commit'],
                                  priority = layer_version_information['priority'],
                                  local_path = layer_version_information['local_path'],
                                  project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

    def get_update_layer_object(self, layer_information, brbe):
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have a git commit sha then we're using a non-git
            # layer, so set local_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # Find the layer version by matching the layer event information
            # against the metadata we have in Toaster

            try:
                br_layer = BRLayer.objects.get(req=br_id,
                                               name=layer_information['name'])
                return br_layer.layer_version
            except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist):
                # There are multiple layers with the same name, or the name
                # hasn't been determined by the toaster.bbclass layer,
                # so let's filter by the local_path
                bc = bbcontroller.getBuildEnvironmentController(pk=be_id)
                for br_layer in BRLayer.objects.filter(req=br_id):
                    if br_layer.giturl and \
                       layer_information['local_path'].endswith(
                           bc.getGitCloneDirectory(br_layer.giturl,
                                                   br_layer.commit)):
                        return br_layer.layer_version

                    if br_layer.local_source_dir == \
                            layer_information['local_path']:
                        return br_layer.layer_version

        # We've reached the end of our search and couldn't find the layer;
        # we can continue but some data may be missing
        raise NotExisting("Unidentified layer %s" %
                          pformat(layer_information))

    def save_target_file_information(self, build_obj, target_obj, filedata):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # always create the root directory as a special case;
        # note that this is never displayed, so the owner, group,
        # size, permission are irrelevant
        tf_obj = Target_File.objects.create(target = target_obj,
                                            path = '/',
                                            size = 0,
                                            owner = '',
                                            group = '',
                                            permission = '',
                                            inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.save()

        # insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")

            # we already created the root directory, so ignore any
            # entry for it
            if not path:
                continue

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not parent_path:
                parent_path = "/"
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)
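
    # Sketch of the relative-symlink normalisation above (hypothetical
    # values, not executed): a link at path "/usr/bin/foo" whose target is
    # "../lib/bar" first becomes "/usr/bin" + "/" + "../lib/bar"; splitting
    # on "/" and popping one component for every ".." then yields
    # "/usr/lib/bar" as the absolute target path looked up in Target_File.
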
    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = []
        for p in packagedict:
            # The search name switches round the installed name vs package name;
            # by default installed name == package name
            searchname = p
            if p not in pkgpnmap:
                logger.warning("Image packages list contains %s, but it is"
                               " missing from the all-packages list where the"
                               " metadata comes from. Skipping...", p)
                continue

            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            built_recipe = recipes[pkgpnmap[p]['PN']]

            if built_package:
                packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
                recipe = built_recipe
            else:
                packagedict[p]['object'], created = \
                        CustomImagePackage.objects.get_or_create(name=searchname)
                # Clear the Package_Dependency objects as we're going to update
                # the CustomImagePackage with the latest dependency information
                packagedict[p]['object'].package_dependencies_target.all().delete()
                packagedict[p]['object'].package_dependencies_source.all().delete()
                try:
                    recipe = self._cached_get(
                        Recipe,
                        name=built_recipe.name,
                        layer_version__build=None,
                        layer_version__release=
                        built_recipe.layer_version.release,
                        file_path=built_recipe.file_path,
                        version=built_recipe.version
                    )
                except (Recipe.DoesNotExist,
                        Recipe.MultipleObjectsReturned) as e:
                    logger.info("We did not find one recipe for the "
                                "configuration data package %s %s" % (p, e))
                    continue

            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipe
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                    # no files recorded for this package, so save files info
                    packagefile_objects = []
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                            path = targetpath,
                            size = targetfilesize))
                    if packagefile_objects:
                        Package_File.objects.bulk_create(packagefile_objects)
                except KeyError as e:
                    errormsg.append("  stpi: Key error, package %s key %s \n" % (p, e))

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            if built_package:
                Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        pattern_so = re.compile(r'.*\.so(\.\d*)?$')
        pattern_lib = re.compile(r'.*\-suffix(\d*)?$')
        pattern_ko = re.compile(r'^kernel-module-.*')
        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                try:
                    # Skip known non-package objects like libraries and kernel modules
                    if pattern_so.match(px) or pattern_lib.match(px):
                        logger.info("Toaster does not add library file dependencies to packages (%s,%s)", p, px)
                        continue
                    if pattern_ko.match(px):
                        logger.info("Toaster does not add kernel module dependencies to packages (%s,%s)", p, px)
                        continue
                    packagedeps_objs.append(Package_Dependency(
                        package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj))
                except KeyError as e:
                    logger.warning("Could not add dependency to the package %s "
                                   "because %s is an unknown package", p, px)

        if packagedeps_objs:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
        else:
            logger.info("No package dependencies created")

        if errormsg:
            logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        Target_Image_File.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_kernel_file(self, target_obj, file_name, file_size):
        """
        Save kernel file (bzImage, modules*) information for a Target target_obj.
        """
        TargetKernelFile.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_sdk_file(self, target_obj, file_name, file_size):
        """
        Save SDK artifacts to the database, associating them with a
        Target object.
        """
        TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
            file_size=file_size)

    def create_logmessage(self, log_information):
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes,
                                       built_package):
        # assert isinstance(build_obj, Build)

        if not 'PN' in package_info.keys():
            # no package data to save (e.g. 'OPKGN'="lib64-*"|"lib32-*")
            return None

        # create and save the object
        pname = package_info['PKG']
        built_recipe = recipes[package_info['PN']]
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        if built_package:
            bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                                          name = pname )
            recipe = built_recipe
        else:
            bp_object, created = \
                    CustomImagePackage.objects.get_or_create(name=pname)
            try:
                recipe = self._cached_get(Recipe,
                                          name=built_recipe.name,
                                          layer_version__build=None,
                                          file_path=built_recipe.file_path,
                                          version=built_recipe.version)

            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
                logger.debug("We did not find one recipe for the configuration "
                             "data package %s" % pname)
                return

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipe
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                        path = path,
                                        size = package_info['FILES_INFO'][path] ))
        if packagefile_objects:
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            if built_package:
                pkg, created = Package.objects.get_or_create(build=build_obj,
                                                             name=p)
            else:
                pkg, created = CustomImagePackage.objects.get_or_create(name=p)

            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency(  package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency(  package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency(  package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency(  package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency(  package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency(  package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if packagedeps_objs:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object
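
    # Sketch of what bb.utils.explode_deps() feeds into _po_byname() above
    # (hypothetical input, not executed): a raw dependency string such as
    #
    #   "glibc (>= 2.31) libgcc1 busybox"
    #
    # is exploded into the bare package names ["glibc", "libgcc1",
    # "busybox"], i.e. version constraints are dropped, and each name is
    # then resolved (or created with size == -1) as a Package row.
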
    def save_build_variables(self, build_obj, vardump):
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if desc:
                HelpText.objects.get_or_create(build=build_obj,
                                               area=HelpText.VARIABLE,
                                               key=k, text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if varhist_objects:
                    VariableHistory.objects.bulk_create(varhist_objects)


class MockEvent(object):
    """ This object is used to create an event, so that the normal
        event-processing methods can be used on data that does not come
        via an actual event
    """
    def __init__(self):
        self.msg = None
        self.levelno = None
        self.taskname = None
        self.taskhash = None
        self.pathname = None
        self.lineno = None

    def getMessage(self):
        """
        Simulate LogRecord message return
        """
        return self.msg
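
# Minimal sketch of how MockEvent is used elsewhere in this module (see
# store_log_error()/store_log_exception() below); the instance name
# `buildinfohelper` and the message are hypothetical:
#
#   mockevent = MockEvent()
#   mockevent.levelno = formatter.ERROR
#   mockevent.msg = "something went wrong"
#   mockevent.pathname = '-- None'
#   mockevent.lineno = LogMessage.ERROR
#   buildinfohelper.store_log_event(mockevent)
#
# i.e. it quacks like a logging.LogRecord just enough for store_log_event().
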
class BuildInfoHelper(object):
    """ This class gathers the build information from the server and sends it
        towards the ORM wrapper for storing in the database
        It is instantiated once per build
        Keeps in memory all data that needs matching before writing it to the database
    """

    # tasks which produce image files; note we include '', as we set
    # the task for a target to '' (i.e. 'build') if no target is
    # explicitly defined
    IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']

    # pylint: disable=protected-access
    # the code will look into the protected variables of the event; no easy way around this
    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def __init__(self, server, has_build_history = False, brbe = None):
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.internal_state['targets'] = []
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]

        # this is set for Toaster-triggered builds by localhostbecontroller
        # via toasterui
        self.brbe = brbe

        self.project = None

        logger.debug("buildinfohelper: Build info helper inited %s" % vars(self))


    ###################
    ## methods to convert event/external info into objects that the ORM layer uses

    def _ensure_build(self):
        """
        Ensure the current build object exists and is up to date with
        data on the bitbake server
        """
        if not 'build' in self.internal_state or not self.internal_state['build']:
            # create the Build object
            self.internal_state['build'] = \
                self.orm_wrapper.get_or_create_build_object(self.brbe)

        build = self.internal_state['build']

        # update missing fields on the Build object with found data
        build_info = {}

        # set to True if at least one field is going to be set
        changed = False

        if not build.build_name:
            build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]

            # only reset the build name if the one on the server is actually
            # a valid value for the build_name field
            if build_name is not None:
                build_info['build_name'] = build_name
                changed = True

        if not build.machine:
            build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
            changed = True

        if not build.distro:
            build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
            changed = True

        if not build.distro_version:
            build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
            changed = True

        if not build.bitbake_version:
            build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
            changed = True

        if changed:
            self.orm_wrapper.update_build(self.internal_state['build'], build_info)

    def _get_task_information(self, event, recipe):
        assert 'taskname' in vars(event)
        self._ensure_build()

        task_information = {}
        task_information['build'] = self.internal_state['build']
        task_information['outcome'] = Task.OUTCOME_NA
        task_information['recipe'] = recipe
        task_information['task_name'] = event.taskname
        try:
            # some tasks don't come with a hash, and that's ok
            task_information['sstate_checksum'] = event.taskhash
        except AttributeError:
            pass
        return task_information

    def _get_layer_version_for_dependency(self, pathRE):
        """ Returns the layer in the toaster db that has a full regex
        match to the pathRE. pathRE - the layer path passed as a regex in the
        event. It is created in cooker.py as a collection for the layer
        priorities.
        """
        self._ensure_build()

        def _sort_longest_path(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Our paths don't append a trailing slash
        if pathRE.endswith("/"):
            pathRE = pathRE[:-1]

        p = re.compile(pathRE)
        path = re.sub(r'[$^]', r'', pathRE)
        # Heuristics: we always match recipe to the deepest layer path in
        # the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects,
                          reverse=True, key=_sort_longest_path):
            if p.fullmatch(os.path.abspath(lvo.local_path)):
                return lvo
            if lvo.layer.local_source_dir:
                if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)):
                    return lvo
            if 0 == path.find(lvo.local_path):
                # sub-layer path inside existing layer
                return lvo

        # if we get here, we didn't read layers correctly;
        # dump whatever information we have on the error log
        logger.warning("Could not match layer dependency for path %s : %s",
                       pathRE,
                       self.orm_wrapper.layer_version_objects)
        return None

    def _get_layer_version_for_path(self, path):
        self._ensure_build()

        def _slkey_interactive(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
            # we can match to the recipe file path
            if path.startswith(lvo.local_path):
                return lvo
            if lvo.layer.local_source_dir and \
               path.startswith(lvo.layer.local_source_dir):
                return lvo

        # if we get here, we didn't read layers correctly;
        # dump whatever information we have on the error log
        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        # mock up the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj
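
    # Sketch of the deepest-path-first heuristic above (hypothetical layer
    # roots, not executed): with layer_version_objects rooted at
    #
    #   /srv/poky/meta
    #   /srv/poky/meta-custom
    #   /srv/poky/meta-custom/meta-inner
    #
    # a recipe at /srv/poky/meta-custom/meta-inner/recipes-foo/foo.bb is
    # tested against the longest local_path first, so it is attributed to
    # meta-inner rather than to the enclosing meta-custom layer.
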
    def _get_recipe_information_from_taskfile(self, taskfile):
        localfilepath = taskfile.split(":")[-1]
        filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
        layer_version_obj = self._get_layer_version_for_path(localfilepath)

        recipe_info = {}
        recipe_info['layer_version'] = layer_version_obj
        recipe_info['file_path'] = localfilepath
        recipe_info['pathflags'] = filepath_flags

        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
        else:
            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

        return recipe_info


    ################################
    ## external available methods to store information
    @staticmethod
    def _get_data_from_event(event):
        evdata = None
        if '_localdata' in vars(event):
            evdata = event._localdata
        elif 'data' in vars(event):
            evdata = event.data
        else:
            raise Exception("Event with neither _localdata nor data properties")
        return evdata

    def store_layer_info(self, event):
        layerinfos = BuildInfoHelper._get_data_from_event(event)
        self.internal_state['lvs'] = {}
        for layer in layerinfos:
            try:
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
            except NotExisting as nee:
                logger.warning("buildinfohelper: cannot identify layer exception: %s ", nee)

    def store_started_build(self):
        self._ensure_build()

    def save_build_log_file_path(self, build_log_path):
        self._ensure_build()

        if not self.internal_state['build'].cooker_log_path:
            data_dict = {'cooker_log_path': build_log_path}
            self.orm_wrapper.update_build(self.internal_state['build'], data_dict)

    def save_build_targets(self, event):
        self._ensure_build()

        # create target information
        assert '_pkgs' in vars(event)
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = self.internal_state['build']

        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)

    def save_build_layers_and_variables(self):
        self._ensure_build()

        build_obj = self.internal_state['build']

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            del self.internal_state['lvs']

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                # preserve layer name in relative path
                                vh['file'] = abs_file_name[pp.rfind("/")+1:]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe
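
    # Sketch of the prefix stripping above (hypothetical paths, not
    # executed): with path_prefixes containing "/srv/poky/meta", a history
    # entry whose file is
    #
    #   /srv/poky/meta/conf/distro/poky.conf
    #
    # is rewritten to "meta/conf/distro/poky.conf" -- the slice starts at
    # pp.rfind("/") + 1, so the layer directory name itself is preserved in
    # the now-relative path.
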
    def set_recipes_to_parse(self, num_recipes):
        """
        Set the number of recipes which need to be parsed for this build.
        This is set the first time ParseStarted is received by toasterui.
        """
        self._ensure_build()
        self.internal_state['build'].recipes_to_parse = num_recipes
        self.internal_state['build'].save()

    def set_recipes_parsed(self, num_recipes):
        """
        Set the number of recipes parsed so far for this build; this is updated
        each time a ParseProgress or ParseCompleted event is received by
        toasterui.
        """
        self._ensure_build()
        if num_recipes <= self.internal_state['build'].recipes_to_parse:
            self.internal_state['build'].recipes_parsed = num_recipes
            self.internal_state['build'].save()

    def update_target_image_file(self, event):
        evdata = BuildInfoHelper._get_data_from_event(event)

        for t in self.internal_state['targets']:
            if t.is_image:
                output_files = list(evdata.keys())
                for output in output_files:
                    if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
                        self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])

    def update_artifact_image_file(self, event):
        self._ensure_build()
        evdata = BuildInfoHelper._get_data_from_event(event)
        for artifact_path in evdata.keys():
            self.orm_wrapper.save_artifact_information(
                self.internal_state['build'], artifact_path,
                evdata[artifact_path])

    def update_build_information(self, event, errors, warnings, taskfailures):
        self._ensure_build()
        self.orm_wrapper.update_build_stats_and_outcome(
            self.internal_state['build'], errors, warnings, taskfailures)

    def store_started_task(self, event):
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname

        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            if 'noexec' in vars(event) and event.noexec:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        self.orm_wrapper.get_update_task_object(task_information)

        self.internal_state['taskdata'][identifier] = {
            'outcome': task_information['outcome'],
        }


    def store_tasks_stats(self, event):
        self._ensure_build()
        task_data = BuildInfoHelper._get_data_from_event(event)

        for (task_file, task_name, task_stats, recipe_name) in task_data:
            build = self.internal_state['build']
            self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)
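
    # Shape of one entry in the task stats event payload, as consumed by
    # ORMWrapper.update_task_object() above (field names inferred from that
    # method; all values here are hypothetical):
    #
    #   ("/.../recipes-core/busybox/busybox_1.36.bb",    # task_file
    #    "do_compile",                                   # task_name
    #    {'started': 1696000000.0, 'ended': 1696000042.5,
    #     'cpu_time_user': 40.1, 'cpu_time_system': 2.2,
    #     'disk_io_read': 1024, 'disk_io_write': 2048},  # task_stats
    #    "busybox")                                      # recipe_name
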
    def update_and_store_task(self, event):
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname
        if not identifier in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]
                elif len(candidates) > 1 and hasattr(event, '_package'):
                    if 'native-' in event._package:
                        identifier = 'native:' + identifier
                    if 'nativesdk-' in event._package:
                        identifier = 'nativesdk:' + identifier
                    candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                    if len(candidates) == 1:
                        identifier = candidates[0]

        assert identifier in self.internal_state['taskdata']
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
        recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
        task_information = self._get_task_information(event, recipe)

        task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']

        if 'logfile' in vars(event):
            task_information['logfile'] = event.logfile

        if '_message' in vars(event):
            task_information['message'] = event._message

        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL

        if task_information['outcome'] == Task.OUTCOME_NA:
            if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
                task_information['outcome'] = Task.OUTCOME_SUCCESS
                del self.internal_state['taskdata'][identifier]

            if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
                task_information['outcome'] = Task.OUTCOME_FAILED
                del self.internal_state['taskdata'][identifier]

        # we force a sync point here, to get the progress bar to show
        if self.autocommit_step % 3 == 0:
            transaction.set_autocommit(True)
            transaction.set_autocommit(False)
        self.autocommit_step += 1

        self.orm_wrapper.get_update_task_object(task_information, True)  # must exist


    def store_missed_state_tasks(self, event):
        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:

            # identifier = fn + taskname + "_setscene"
            recipe_information = self._get_recipe_information_from_taskfile(fn)
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
            mevent = MockEvent()
            mevent.taskname = taskname
            mevent.taskhash = taskhash
            task_information = self._get_task_information(mevent, recipe)

            task_information['start_time'] = timezone.now()
            task_information['outcome'] = Task.OUTCOME_NA
            task_information['sstate_checksum'] = taskhash
            task_information['sstate_result'] = Task.SSTATE_MISS
            task_information['path_to_sstate_obj'] = sstatefile

            self.orm_wrapper.get_update_task_object(task_information)

        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:

            # identifier = fn + taskname + "_setscene"
            recipe_information = self._get_recipe_information_from_taskfile(fn)
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
            mevent = MockEvent()
            mevent.taskname = taskname
            mevent.taskhash = taskhash
            task_information = self._get_task_information(mevent, recipe)

            task_information['path_to_sstate_obj'] = sstatefile

            self.orm_wrapper.get_update_task_object(task_information)


    def store_target_package_data(self, event):
        self._ensure_build()

        # for all image targets
        for target in self.internal_state['targets']:
            if target.is_image:
                pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
                imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
                filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})

                try:
                    self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
                    self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
                except KeyError as e:
                    logger.warning("KeyError in save_target_package_information "
                                   "%s ", e)

                # only try to find files in the image if the task for this
                # target is one which produces image files; otherwise, the old
                # list of files in the files-in-image.txt file will be
                # appended to the target even if it didn't produce any images
                if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
                    try:
                        self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
                    except KeyError as e:
                        logger.warning("KeyError in save_target_file_information "
                                       "%s ", e)


    def cancel_cli_build(self):
        """
        If a build is currently underway, set its state to CANCELLED;
        note that this only gets called for command line builds which are
        interrupted, so it doesn't touch any BuildRequest objects
        """
        self._ensure_build()
        self.internal_state['build'].outcome = Build.CANCELLED
        self.internal_state['build'].save()
        signal_runbuilds()

    def store_dependency_information(self, event):
        assert '_depgraph' in vars(event)
        assert 'layer-priorities' in event._depgraph
        assert 'pn' in event._depgraph
        assert 'tdepends' in event._depgraph

        errormsg = []

        # save layer version priorities
        if 'layer-priorities' in event._depgraph.keys():
            for lv in event._depgraph['layer-priorities']:
                (_, path, _, priority) = lv
                layer_version_obj = self._get_layer_version_for_dependency(path)
                if layer_version_obj:
                    layer_version_obj.priority = priority
                    layer_version_obj.save()

        # save recipe information
        self.internal_state['recipes'] = {}
        for pn in event._depgraph['pn']:

            file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
            pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
            layer_version_obj = self._get_layer_version_for_path(file_name)

            assert layer_version_obj is not None

            recipe_info = {}
            recipe_info['name'] = pn
            recipe_info['layer_version'] = layer_version_obj

            if 'version' in event._depgraph['pn'][pn]:
                recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

            if 'summary' in event._depgraph['pn'][pn]:
                recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

            if 'license' in event._depgraph['pn'][pn]:
                recipe_info['license'] = event._depgraph['pn'][pn]['license']

            if 'description' in event._depgraph['pn'][pn]:
                recipe_info['description'] = event._depgraph['pn'][pn]['description']

            if 'section' in event._depgraph['pn'][pn]:
                recipe_info['section'] = event._depgraph['pn'][pn]['section']

            if 'homepage' in event._depgraph['pn'][pn]:
                recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

            if 'bugtracker' in event._depgraph['pn'][pn]:
                recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

            recipe_info['file_path'] = file_name
            recipe_info['pathflags'] = pathflags

            if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
                recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
            else:
                raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

            recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
            recipe.is_image = False
            if 'inherits' in event._depgraph['pn'][pn].keys():
                for cls in event._depgraph['pn'][pn]['inherits']:
                    if cls.endswith('/image.bbclass'):
                        recipe.is_image = True
                        recipe_info['is_image'] = True
                        # Save the is_image state to the relevant recipe objects
                        self.orm_wrapper.get_update_recipe_object(recipe_info)
                        break
            if recipe.is_image:
                for t in self.internal_state['targets']:
                    if pn == t.target:
                        t.is_image = True
                        t.save()
            self.internal_state['recipes'][pn] = recipe

        # we will not get recipes for any key whose value is listed in ASSUME_PROVIDED

        assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()

        # save recipe dependency
        # buildtime
        recipedeps_objects = []
        for recipe in event._depgraph['depends']:
            target = self.internal_state['recipes'][recipe]
            for dep in event._depgraph['depends'][recipe]:
                if dep in assume_provided:
                    continue
                via = None
                if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
                    deprecipe = event._depgraph['providermap'][dep][0]
                    dependency = self.internal_state['recipes'][deprecipe]
                    via = Provides.objects.get_or_create(name=dep,
                                                         recipe=dependency)[0]
                elif dep in self.internal_state['recipes']:
                    dependency = self.internal_state['recipes'][dep]
                else:
                    errormsg.append("  stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
                    continue
                recipe_dep = Recipe_Dependency(recipe=target,
                                               depends_on=dependency,
                                               via=via,
                                               dep_type=Recipe_Dependency.TYPE_DEPENDS)
                recipedeps_objects.append(recipe_dep)

        Recipe_Dependency.objects.bulk_create(recipedeps_objects)

        # save all task information
        def _save_a_task(taskdesc):
            spec = re.split(r'\.', taskdesc)
            pn = ".".join(spec[0:-1])
            taskname = spec[-1]
            e = event
            e.taskname = pn
            recipe = self.internal_state['recipes'][pn]
            task_info = self._get_task_information(e, recipe)
            task_info['task_name'] = taskname
            task_obj = self.orm_wrapper.get_update_task_object(task_info)
            return task_obj
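
        # Sketch of the taskdesc parsing in _save_a_task (hypothetical
        # value, not executed): a depgraph entry such as
        # "busybox.do_compile" splits on "." into ["busybox", "do_compile"];
        # everything up to the last dot is rejoined as the recipe name (pn),
        # which guards against recipe names that themselves contain dots,
        # and the final component is the task name.
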
self.orm_wrapper.get_update_task_object(task_info) 1501 return task_obj 1502 1503 # create tasks 1504 tasks = {} 1505 for taskdesc in event._depgraph['tdepends']: 1506 tasks[taskdesc] = _save_a_task(taskdesc) 1507 1508 # create dependencies between tasks 1509 taskdeps_objects = [] 1510 for taskdesc in event._depgraph['tdepends']: 1511 target = tasks[taskdesc] 1512 for taskdep in event._depgraph['tdepends'][taskdesc]: 1513 if taskdep not in tasks: 1514 # Fetch tasks info is not collected previously 1515 dep = _save_a_task(taskdep) 1516 else: 1517 dep = tasks[taskdep] 1518 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep )) 1519 Task_Dependency.objects.bulk_create(taskdeps_objects) 1520 1521 if errormsg: 1522 logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg)) 1523 1524 1525 def store_build_package_information(self, event): 1526 self._ensure_build() 1527 1528 package_info = BuildInfoHelper._get_data_from_event(event) 1529 self.orm_wrapper.save_build_package_information( 1530 self.internal_state['build'], 1531 package_info, 1532 self.internal_state['recipes'], 1533 built_package=True) 1534 1535 self.orm_wrapper.save_build_package_information( 1536 self.internal_state['build'], 1537 package_info, 1538 self.internal_state['recipes'], 1539 built_package=False) 1540 1541 def _store_build_done(self, errorcode): 1542 logger.info("Build exited with errorcode %d", errorcode) 1543 1544 if not self.brbe: 1545 return 1546 1547 br_id, be_id = self.brbe.split(":") 1548 1549 br = BuildRequest.objects.get(pk = br_id) 1550 1551 # if we're 'done' because we got cancelled update the build outcome 1552 if br.state == BuildRequest.REQ_CANCELLING: 1553 logger.info("Build cancelled") 1554 br.build.outcome = Build.CANCELLED 1555 br.build.save() 1556 self.internal_state['build'] = br.build 1557 errorcode = 0 1558 1559 if errorcode == 0: 1560 # request archival of the project artifacts 1561 br.state = BuildRequest.REQ_COMPLETED 1562 else: 1563 br.state = BuildRequest.REQ_FAILED 1564 br.save() 1565 1566 be = BuildEnvironment.objects.get(pk = be_id) 1567 be.lock = BuildEnvironment.LOCK_FREE 1568 be.save() 1569 signal_runbuilds() 1570 1571 def store_log_error(self, text): 1572 mockevent = MockEvent() 1573 mockevent.levelno = formatter.ERROR 1574 mockevent.msg = text 1575 mockevent.pathname = '-- None' 1576 mockevent.lineno = LogMessage.ERROR 1577 self.store_log_event(mockevent) 1578 1579 def store_log_exception(self, text, backtrace = ""): 1580 mockevent = MockEvent() 1581 mockevent.levelno = -1 1582 mockevent.msg = text 1583 mockevent.pathname = backtrace 1584 mockevent.lineno = -1 1585 self.store_log_event(mockevent) 1586 1587 def store_log_event(self, event,cli_backlog=True): 1588 self._ensure_build() 1589 1590 if event.levelno < formatter.WARNING: 1591 return 1592 1593 # early return for CLI builds 1594 if cli_backlog and self.brbe is None: 1595 if not 'backlog' in self.internal_state: 1596 self.internal_state['backlog'] = [] 1597 self.internal_state['backlog'].append(event) 1598 return 1599 1600 if 'backlog' in self.internal_state: 1601 # if we have a backlog of events, do our best to save them here 1602 if self.internal_state['backlog']: 1603 tempevent = self.internal_state['backlog'].pop() 1604 logger.debug("buildinfohelper: Saving stored event %s " 1605 % tempevent) 1606 self.store_log_event(tempevent,cli_backlog) 1607 else: 1608 logger.info("buildinfohelper: All events saved") 1609 del self.internal_state['backlog'] 1610 1611 log_information = 
        log_information = {}
        log_information['build'] = self.internal_state['build']
        if event.levelno == formatter.CRITICAL:
            log_information['level'] = LogMessage.CRITICAL
        elif event.levelno == formatter.ERROR:
            log_information['level'] = LogMessage.ERROR
        elif event.levelno == formatter.WARNING:
            log_information['level'] = LogMessage.WARNING
        elif event.levelno == -2:   # toaster self-logging
            log_information['level'] = -2
        else:
            log_information['level'] = LogMessage.INFO

        log_information['message'] = event.getMessage()
        log_information['pathname'] = event.pathname
        log_information['lineno'] = event.lineno
        logger.info("buildinfohelper: storing log event: %s", log_information)

        self.orm_wrapper.create_logmessage(log_information)

    def _get_filenames_from_image_license(self, image_license_manifest_path):
        """
        Find the FILES line in the image_license.manifest file,
        which has the basenames of the bzImage and modules files
        in this format:
        FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
        """
        files = []
        with open(image_license_manifest_path) as image_license:
            for line in image_license:
                if line.startswith('FILES'):
                    files_str = line.split(':')[1].strip()
                    files_str = re.sub(r' {2,}', ' ', files_str)

                    # ignore lines like "FILES:" with no filenames
                    if files_str:
                        files += files_str.split(' ')
        return files

    def _endswith(self, str_to_test, endings):
        """
        Returns True if str_to_test ends with one of the strings in the
        list endings, False otherwise.
        """
        return any(str_to_test.endswith(ending) for ending in endings)

    def scan_task_artifacts(self, event):
        """
        The 'TaskArtifacts' event passes the manifest file content for the
        tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and
        'do_populate_sdk_ext'. The first two will be implemented later.
        """
        task_vars = BuildInfoHelper._get_data_from_event(event)
        task_name = task_vars['task'][task_vars['task'].find(':')+1:]
        task_artifacts = task_vars['artifacts']

        if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']:
            targets = [target for target in self.internal_state['targets']
                       if target.task == task_name[3:]]
            if not targets:
                logger.warning("scan_task_artifacts: SDK targets not found: %s", task_name)
                return
            for artifact_path in task_artifacts:
                if not os.path.isfile(artifact_path):
                    logger.warning("scan_task_artifacts: artifact file not found: %s", artifact_path)
                    continue
                for target in targets:
                    # don't record the file if it's already been added
                    # to this target
                    matching_files = TargetSDKFile.objects.filter(
                        target=target, file_name=artifact_path)
                    if matching_files.count() == 0:
                        artifact_size = os.stat(artifact_path).st_size
                        self.orm_wrapper.save_target_sdk_file(
                            target, artifact_path, artifact_size)

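    # A note on the dedup queries used in scan_task_artifacts() above and
    # scan_sdk_artifacts() below: a file is only recorded when no
    # TargetSDKFile row already links this target to this path. A minimal
    # sketch of the same check (variable names are illustrative only):
    #
    #   already_recorded = TargetSDKFile.objects.filter(
    #       target=target, file_name=artifact_path).exists()
    #   if not already_recorded:
    #       orm_wrapper.save_target_sdk_file(target, artifact_path, size)
    #
    # .exists() is equivalent to the .count() == 0 test used in this file.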

    def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
        """
        Find files in deploy_dir_image whose basename starts with the
        string image_name and ends with one of the strings in
        image_file_extensions.

        Returns a list of file dictionaries like

        [
            {
                'path': '/path/to/image/file',
                'size': <file size in bytes>
            }
        ]
        """
        image_files = []

        for dirpath, _, filenames in os.walk(deploy_dir_image):
            for filename in filenames:
                if filename.startswith(image_name) and \
                   self._endswith(filename, image_file_extensions):
                    image_file_path = os.path.join(dirpath, filename)
                    image_file_size = os.stat(image_file_path).st_size

                    image_files.append({
                        'path': image_file_path,
                        'size': image_file_size
                    })

        return image_files

    def scan_image_artifacts(self):
        """
        Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
        with a Target object in self.internal_state['targets'].

        We have two situations to handle:

        1. This is the first time a target + machine has been built, so
           add files from DEPLOY_DIR_IMAGE to the target.

        OR

        2. There are no new files for the target (they were already produced
           by a previous build), so copy them from the most recent previous
           build with the same target, task and machine.
        """
        deploy_dir_image = \
            self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]

        # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
        # any image artifacts, so we can return immediately
        if not deploy_dir_image:
            return

        buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
        machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]

        # location of the manifest files for this build;
        # note that these files are only produced if an image is produced
        license_directory = \
            self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]

        # file name extensions for image files
        image_file_extensions_unique = set()
        image_fstypes = self.server.runCommand(
            ['getVariable', 'IMAGE_FSTYPES'])[0]
        if image_fstypes is not None:
            image_types_str = image_fstypes.strip()
            image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
            image_file_extensions_unique = set(image_file_extensions.split(' '))

        targets = self.internal_state['targets']

        # filter out anything which isn't an image target
        image_targets = [target for target in targets if target.is_image]

        if len(image_targets) > 0:
            # if there are image targets, retrieve the image name
            image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
            if not image_name:
                # the build target is an image, but IMAGE_NAME was not
                # available as a variable, so extract it from the bitbake
                # command line instead
                logger.info("IMAGE_NAME not found, extracting from bitbake command")
                cmd = self.server.runCommand(['getVariable', 'BB_CMDLINE'])[0]
                # filter out tokens that are command line options
                cmd = [token for token in cmd if not token.startswith('-')]
                image_name = cmd[1].split(':', 1)[0]  # remove everything after ':' in the image name
                logger.info("IMAGE_NAME found as: %s", image_name)

        for image_target in image_targets:
            # this is set to True if we find at least one file relating to
            # this target; if this remains False after the scan, we copy the
            # files from the most-recent Target with the same target + machine
            # onto this Target instead
            has_files = False

            # we construct real_image_name because by the time we reach
            # BuildCompleted, IMAGE_NAME has reset to
            # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
            # we need to change it back to
            # <TARGET>-<MACHINE>-<BUILDNAME>
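            # worked example (values are illustrative only): if IMAGE_NAME
            # has reset to 'defaultpkgname-qemux86-20160603165040' and
            # image_target.target is 'core-image-minimal', the substitution
            # below produces 'core-image-minimal-qemux86-20160603165040'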
            real_image_name = re.sub(r'^defaultpkgname', image_target.target,
                                     image_name)

            image_license_manifest_path = os.path.join(
                license_directory,
                real_image_name,
                'image_license.manifest')

            # if image_license.manifest exists, we can read the names of
            # bzImage, modules etc. files for this build from it, then look
            # for them in the DEPLOY_DIR_IMAGE; note that this file is only
            # produced if an image file was produced
            if os.path.isfile(image_license_manifest_path):
                has_files = True

                basenames = self._get_filenames_from_image_license(
                    image_license_manifest_path)

                for basename in basenames:
                    artifact_path = os.path.join(deploy_dir_image, basename)
                    if not os.path.exists(artifact_path):
                        logger.warning("artifact %s doesn't exist, skipping", artifact_path)
                        continue
                    artifact_size = os.stat(artifact_path).st_size

                    # note that the artifact will only be saved against this
                    # build if it hasn't been already
                    self.orm_wrapper.save_target_kernel_file(image_target,
                        artifact_path, artifact_size)

                # store the license manifest path on the target
                # (this file is also created any time an image file is created)
                license_manifest_path = os.path.join(license_directory,
                    real_image_name, 'license.manifest')

                self.orm_wrapper.update_target_set_license_manifest(
                    image_target, license_manifest_path)

                # store the package manifest path on the target (this file
                # is created any time an image file is created)
                package_manifest_path = os.path.join(deploy_dir_image,
                    real_image_name + '.rootfs.manifest')

                if os.path.exists(package_manifest_path):
                    self.orm_wrapper.update_target_set_package_manifest(
                        image_target, package_manifest_path)

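            # for example (hypothetical values), with real_image_name
            # 'core-image-minimal-qemux86-20160603165040' and
            # image_file_extensions_unique {'ext4', 'tar.bz2'},
            # _get_image_files() below would return something like:
            #
            #   [{'path': DEPLOY_DIR_IMAGE + '/core-image-minimal-qemux86-'
            #             '20160603165040.rootfs.ext4',
            #     'size': 8912896}]
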
            # scan the directory for image files relating to this build
            # (via real_image_name); note that we don't have to set
            # has_files = True, as searching for the license manifest file
            # will already have set it to True if at least one image file was
            # produced; also note that real_image_name includes BUILDNAME,
            # which in turn includes a timestamp, so if no files were produced
            # for this timestamp (i.e. the build reused existing image files
            # already in the directory), no files will be recorded against
            # this target
            image_files = self._get_image_files(deploy_dir_image,
                real_image_name, image_file_extensions_unique)

            for image_file in image_files:
                self.orm_wrapper.save_target_image_file_information(
                    image_target, image_file['path'], image_file['size'])

            if not has_files:
                # copy image files and build artifacts from the
                # most-recently-built Target with the same target + machine
                # as this Target; also copy the license manifest path, as
                # that is not treated as an artifact and needs to be set
                # separately
                similar_target = \
                    self.orm_wrapper.get_similar_target_with_image_files(
                        image_target)

                if similar_target:
                    logger.info('image artifacts for target %s cloned from '
                                'target %s' % (image_target.pk, similar_target.pk))
                    self.orm_wrapper.clone_image_artifacts(similar_target,
                                                           image_target)

    def _get_sdk_targets(self):
        """
        Return targets which could generate SDK artifacts, i.e. those whose
        task is "populate_sdk" or "populate_sdk_ext".
        """
        return [target for target in self.internal_state['targets']
                if target.task in ['populate_sdk', 'populate_sdk_ext']]

    def scan_sdk_artifacts(self, event):
        """
        Note that we have to intercept an SDKArtifactInfo event from
        toaster.bbclass (via toasterui) to get hold of the SDK variables we
        need to be able to scan for files accurately: this is because
        variables like TOOLCHAIN_OUTPUTNAME have been reset to None by the
        time BuildCompleted is fired by bitbake, so we have to get those
        values while the build is still in progress.

        For populate_sdk_ext, this runs twice, with two different
        TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
        files in the SDK output directory.
        """
        sdk_vars = BuildInfoHelper._get_data_from_event(event)
        toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']

        # targets which might have created SDK artifacts
        sdk_targets = self._get_sdk_targets()

        # location of SDK artifacts
        tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
        sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')

        # all files in the SDK directory
        artifacts = []
        for dir_path, _, filenames in os.walk(sdk_dir):
            for filename in filenames:
                full_path = os.path.join(dir_path, filename)
                if not os.path.islink(full_path):
                    artifacts.append(full_path)

        for sdk_target in sdk_targets:
            # find files in the SDK directory which haven't already been
            # recorded against a Target and whose basename matches
            # TOOLCHAIN_OUTPUTNAME
            for artifact_path in artifacts:
                basename = os.path.basename(artifact_path)

                toolchain_match = basename.startswith(toolchain_outputname)

                # files which match the name of the target which produced
                # them; for example,
                # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
                target_match = re.search(sdk_target.target, basename)

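                # illustrative examples (made-up basenames) of the two ways
                # the checks above and below can attribute a file to this
                # target:
                #   'poky-glibc-x86_64-core-image-sato-i586-toolchain-2.1.sh'
                #       -> toolchain_match and target_match
                #   'x86_64-nativesdk-libc.tar.xz'
                #       -> is_ext_sdk_target and is_ext_sdk_file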
                # targets which produce "*-nativesdk-*" files
                is_ext_sdk_target = sdk_target.task in \
                    ['do_populate_sdk_ext', 'populate_sdk_ext']

                # SDK files which don't match the target name, e.g.
                # x86_64-nativesdk-libc.*
                # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
                is_ext_sdk_file = re.search('-nativesdk-', basename)

                file_from_target = (toolchain_match and target_match) or \
                    (is_ext_sdk_target and is_ext_sdk_file)

                if file_from_target:
                    # don't record the file if it's already been added to
                    # this target
                    matching_files = TargetSDKFile.objects.filter(
                        target=sdk_target, file_name=artifact_path)

                    if matching_files.count() == 0:
                        artifact_size = os.stat(artifact_path).st_size

                        self.orm_wrapper.save_target_sdk_file(
                            sdk_target, artifact_path, artifact_size)

    def clone_required_sdk_artifacts(self):
        """
        If an SDK target doesn't have any SDK artifacts, this means that
        the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
        in turn means that this build didn't generate any new artifacts for
        that target.

        In this case, clone the SDK artifacts for targets in the current
        build from similar targets in previous builds.
        """
        sdk_targets = self._get_sdk_targets()
        for sdk_target in sdk_targets:
            # only clone for SDK targets which have no TargetSDKFiles yet
            if sdk_target.targetsdkfile_set.all().count() == 0:
                similar_target = \
                    self.orm_wrapper.get_similar_target_with_sdk_files(
                        sdk_target)
                if similar_target:
                    logger.info('SDK artifacts for target %s cloned from '
                                'target %s' % (sdk_target.pk, similar_target.pk))
                    self.orm_wrapper.clone_sdk_artifacts(similar_target,
                                                         sdk_target)

    def close(self, errorcode):
        self._store_build_done(errorcode)

        if 'backlog' in self.internal_state:
            # save any missed events in the database for the current build;
            # passing cli_backlog=False means command line build events are
            # not skipped, and store_log_event() will drain the rest of the
            # backlog recursively
            tempevent = self.internal_state['backlog'].pop()
            self.store_log_event(tempevent, False)

        # unset the brbe; this is to prevent subsequent command-line builds
        # being incorrectly attached to the previous Toaster-triggered build;
        # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
        self.brbe = None

        # unset the internal Build object to prevent it being reused for the
        # next build
        self.internal_state['build'] = None
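
# A minimal usage sketch (assumed driver code; the constructor arguments
# shown reflect toasterui's typical usage and may differ): the toasterui
# event loop is expected to drive this class roughly as
#
#   helper = BuildInfoHelper(server, has_build_history=False)
#   helper.store_log_event(event)     # for each log event during the build
#   helper.scan_image_artifacts()     # once the build's targets are known
#   helper.close(errorcode)           # when bitbake reports completion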