xref: /openbmc/openbmc/poky/bitbake/lib/bb/ui/buildinfohelper.py (revision f1e5d6968976c2341c6d554bfcc8895f1b33c26b)
1#
2# BitBake ToasterUI Implementation
3#
4# Copyright (C) 2013        Intel Corporation
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import sys
10import bb
11import re
12import os
13
14import django
15from django.utils import timezone
16
17import toaster
18# Add toaster module to the search path to help django.setup() find the right
19# modules
20sys.path.insert(0, os.path.dirname(toaster.__file__))
21
22#Set the DJANGO_SETTINGS_MODULE if it's not already set
23os.environ["DJANGO_SETTINGS_MODULE"] =\
24    os.environ.get("DJANGO_SETTINGS_MODULE",
25                   "toaster.toastermain.settings")
26# Setup django framework (needs to be done before importing modules)
27django.setup()
28
29from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
30from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
31from orm.models import Variable, VariableHistory
32from orm.models import Package, Package_File, Target_Installed_Package, Target_File
33from orm.models import Task_Dependency, Package_Dependency
34from orm.models import Recipe_Dependency, Provides
35from orm.models import Project, CustomImagePackage
36from orm.models import signal_runbuilds
37
38from bldcontrol.models import BuildEnvironment, BuildRequest
39from bldcontrol.models import BRLayer
40from bldcontrol import bbcontroller
41
42from bb.msg import BBLogFormatter as formatter
43from django.db import models
44from pprint import pformat
45import logging
46from datetime import datetime, timedelta
47
48from django.db import transaction
49
50
# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
# Module-level logger shared by everything in this file.
logger = logging.getLogger("ToasterLogger")
54
class NotExisting(Exception):
    """Raised when a database object that was expected to already exist
    could not be found (or had to be created unexpectedly)."""
57
58class ORMWrapper(object):
59    """ This class creates the dictionaries needed to store information in the database
60        following the format defined by the Django models. It is also used to save this
61        information in the database.
62    """
63
64    def __init__(self):
65        self.layer_version_objects = []
66        self.layer_version_built = []
67        self.task_objects = {}
68        self.recipe_objects = {}
69
70    @staticmethod
71    def _build_key(**kwargs):
72        key = "0"
73        for k in sorted(kwargs.keys()):
74            if isinstance(kwargs[k], models.Model):
75                key += "-%d" % kwargs[k].id
76            else:
77                key += "-%s" % str(kwargs[k])
78        return key
79
80
81    def _cached_get_or_create(self, clazz, **kwargs):
82        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
83            database through any other means.
84        """
85
86        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
87
88        key = ORMWrapper._build_key(**kwargs)
89        dictname = "objects_%s" % clazz.__name__
90        if not dictname in vars(self).keys():
91            vars(self)[dictname] = {}
92
93        created = False
94        if not key in vars(self)[dictname].keys():
95            vars(self)[dictname][key], created = \
96                clazz.objects.get_or_create(**kwargs)
97
98        return (vars(self)[dictname][key], created)
99
100
101    def _cached_get(self, clazz, **kwargs):
102        """ This is a memory-cached get. We assume that the objects will not change  in the database between gets.
103        """
104        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
105
106        key = ORMWrapper._build_key(**kwargs)
107        dictname = "objects_%s" % clazz.__name__
108
109        if not dictname in vars(self).keys():
110            vars(self)[dictname] = {}
111
112        if not key in vars(self)[dictname].keys():
113            vars(self)[dictname][key] = clazz.objects.get(**kwargs)
114
115        return vars(self)[dictname][key]
116
117    def get_similar_target_with_image_files(self, target):
118        """
119        Get a Target object "similar" to target; i.e. with the same target
120        name ('core-image-minimal' etc.) and machine.
121        """
122        return target.get_similar_target_with_image_files()
123
124    def get_similar_target_with_sdk_files(self, target):
125        return target.get_similar_target_with_sdk_files()
126
127    def clone_image_artifacts(self, target_from, target_to):
128        target_to.clone_image_artifacts_from(target_from)
129
130    def clone_sdk_artifacts(self, target_from, target_to):
131        target_to.clone_sdk_artifacts_from(target_from)
132
133    def _timestamp_to_datetime(self, secs):
134        """
135        Convert timestamp in seconds to Python datetime
136        """
137        return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))
138
139    # pylint: disable=no-self-use
140    # we disable detection of no self use in functions because the methods actually work on the object
141    # even if they don't touch self anywhere
142
143    # pylint: disable=bad-continuation
144    # we do not follow the python conventions for continuation indentation due to long lines here
145
146    def get_or_create_build_object(self, brbe):
147        prj = None
148        buildrequest = None
149        if brbe is not None:
150            # Toaster-triggered build
151            logger.debug("buildinfohelper: brbe is %s" % brbe)
152            br, _ = brbe.split(":")
153            buildrequest = BuildRequest.objects.get(pk=br)
154            prj = buildrequest.project
155        else:
156            # CLI build
157            prj = Project.objects.get_or_create_default_project()
158            logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj)
159
160        if buildrequest is not None:
161            # reuse existing Build object
162            build = buildrequest.build
163            build.project = prj
164            build.save()
165        else:
166            # create new Build object
167            now = timezone.now()
168            build = Build.objects.create(
169                project=prj,
170                started_on=now,
171                completed_on=now,
172                build_name='')
173
174        logger.debug("buildinfohelper: build is created %s" % build)
175
176        if buildrequest is not None:
177            buildrequest.build = build
178            buildrequest.save()
179
180        return build
181
182    def update_build(self, build, data_dict):
183        for key in data_dict:
184            setattr(build, key, data_dict[key])
185        build.save()
186
187    @staticmethod
188    def get_or_create_targets(target_info):
189        """
190        NB get_or_create() is used here because for Toaster-triggered builds,
191        we already created the targets when the build was triggered.
192        """
193        result = []
194        for target in target_info['targets']:
195            task = ''
196            if ':' in target:
197                target, task = target.split(':', 1)
198            if task.startswith('do_'):
199                task = task[3:]
200            if task == 'build':
201                task = ''
202
203            obj, _ = Target.objects.get_or_create(build=target_info['build'],
204                                                  target=target,
205                                                  task=task)
206            result.append(obj)
207        return result
208
209    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
210        assert isinstance(build,Build)
211        assert isinstance(errors, int)
212        assert isinstance(warnings, int)
213
214        if build.outcome == Build.CANCELLED:
215            return
216        try:
217            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
218                return
219        except AttributeError:
220            # We may not have a buildrequest if this is a command line build
221            pass
222
223        outcome = Build.SUCCEEDED
224        if errors or taskfailures:
225            outcome = Build.FAILED
226
227        build.completed_on = timezone.now()
228        build.outcome = outcome
229        build.save()
230
231        # We force a sync point here to force the outcome status commit,
232        # which resolves a race condition with the build completion takedown
233        transaction.set_autocommit(True)
234        transaction.set_autocommit(False)
235
236        signal_runbuilds()
237
238    def update_target_set_license_manifest(self, target, license_manifest_path):
239        target.license_manifest_path = license_manifest_path
240        target.save()
241
242    def update_target_set_package_manifest(self, target, package_manifest_path):
243        target.package_manifest_path = package_manifest_path
244        target.save()
245
246    def update_task_object(self, build, task_name, recipe_name, task_stats):
247        """
248        Find the task for build which matches the recipe and task name
249        to be stored
250        """
251        task_to_update = Task.objects.get(
252            build = build,
253            task_name = task_name,
254            recipe__name = recipe_name
255        )
256
257        if 'started' in task_stats and 'ended' in task_stats:
258            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
259            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
260            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
261        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
262        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
263        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
264            task_to_update.disk_io_read = task_stats['disk_io_read']
265            task_to_update.disk_io_write = task_stats['disk_io_write']
266            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']
267
268        task_to_update.save()
269
    def get_update_task_object(self, task_information, must_exist = False):
        """Fetch (or create) the Task matching task_information and update
        its fields from the dict.

        task_information must carry 'build', 'recipe' and 'task_name'; any
        other key matching a Task field name is copied onto the object.
        When must_exist is True and the object had to be created,
        NotExisting is raised instead.

        A COVERED task with exactly one related setscene task is folded to
        CACHED, and its sstate_result is derived from that setscene task's
        outcome.  The object is saved only if something actually changed.
        """
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name']
                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        # write differing values straight into the instance __dict__ via
        # vars(); track whether anything changed so we can skip the save
        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        if object_changed:
            task_object.save()
        return task_object
309
310
    def get_update_recipe_object(self, recipe_information, must_exist = False):
        """Fetch (or create) the Recipe for recipe_information and update
        its fields from the dict.

        A second, build-history copy of the recipe is maintained against
        the snapshot layer versions in self.layer_version_built.  The
        'real' recipe is returned when the layer version's build belongs
        to the default (command-line) project — Toaster then owns the
        data — or when no built layer matched; otherwise the history copy
        is returned.

        NOTE(review): must_exist is accepted for signature symmetry with
        get_update_task_object but is not used in this body.
        """
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/")      # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            # copy every matching field from recipe_information into the
            # instance __dict__; saves whenever at least one key matched
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                        layer_version=built_layer,
                        file_path=recipe_information['file_path'],
                        pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if  recipe_information['layer_version'].build is not None and \
            recipe_information['layer_version'].build.project == \
                Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe
359
    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        """Return the Layer_Version to use for this build.

        layer_obj may already be a Layer_Version (Toaster matched the
        layer earlier): it is then refreshed with the new local_path and a
        snapshot copy is created for build history.  Otherwise layer_obj
        is a Layer and a Layer_Version is fetched or created for this
        build.  Either way the returned object is also appended to
        self.layer_version_objects.
        """
        if isinstance(layer_obj, Layer_Version):
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.debug("Created new layer version %s for build history",
                         layer_copy.layer.name)

            # remembered so get_update_recipe_object can find the snapshot
            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                  build = build_obj,
                                  layer = layer_obj,
                                  branch = layer_version_information['branch'],
                                  commit = layer_version_information['commit'],
                                  priority = layer_version_information['priority'],
                                  local_path = layer_version_information['local_path'],
                                  project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object
412
    def get_update_layer_object(self, layer_information, brbe):
        """Resolve the layer described by layer_information.

        For command-line builds (brbe is None) a Layer row is fetched or
        created and returned.  For Toaster-triggered builds (brbe is a
        "buildrequest:buildenvironment" id pair) the matching BRLayer's
        layer_version is returned, matched first by name and then by
        local_path.  Raises NotExisting when no match can be made.
        """
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have git commit sha then we're using a non-git
            # layer so set the layer_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # Find the layer version by matching the layer event information
            # against the metadata we have in Toaster

            try:
                br_layer = BRLayer.objects.get(req=br_id,
                                               name=layer_information['name'])
                return br_layer.layer_version
            except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist):
                # There are multiple of the same layer name or the name
                # hasn't been determined by the toaster.bbclass layer
                # so let's filter by the local_path
                bc = bbcontroller.getBuildEnvironmentController(pk=be_id)
                for br_layer in BRLayer.objects.filter(req=br_id):
                    # git layers: compare against the clone directory
                    if br_layer.giturl and \
                       layer_information['local_path'].endswith(
                           bc.getGitCloneDirectory(br_layer.giturl,
                                                   br_layer.commit)):
                            return br_layer.layer_version

                    # non-git layers: compare the local source directory
                    if br_layer.local_source_dir == \
                            layer_information['local_path']:
                        return br_layer.layer_version

        # We've reached the end of our search and couldn't find the layer
        # we can continue but some data may be missing
        raise NotExisting("Unidentified layer %s" %
                          pformat(layer_information))
464
    def save_target_file_information(self, build_obj, target_obj, filedata):
        """Store the filesystem layout of an image target as Target_File
        rows.

        filedata carries three lists under 'dirs', 'files' and 'syms'.
        From the indexing below, each entry appears to be an ls-style
        record: index 0 is the mode string (leading type character plus
        permissions), indices 1-3 are owner/group/size, index 4 is the
        dot-prefixed path, and symlink entries carry the link target at
        index 6 — TODO confirm against the event producer.

        Directories are inserted parents-first so the _cached_get parent
        lookup always succeeds; files and symlinks follow.
        """
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # always create the root directory as a special case;
        # note that this is never displayed, so the owner, group,
        # size, permission are irrelevant
        tf_obj = Target_File.objects.create(target = target_obj,
                                            path = '/',
                                            size = 0,
                                            owner = '',
                                            group = '',
                                            permission = '',
                                            inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.save()

        # insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")

            # we already created the root directory, so ignore any
            # entry for it
            if not path:
                continue

            # parent path = everything before the last '/' component
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not parent_path:
                parent_path = "/"
            # parents were inserted first (depth ordering), so this lookup
            # finds an already-created row
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            # inode type comes from the leading character of the mode
            # string: b/c/p = block/character/fifo, otherwise regular
            inodetype = Target_File.ITYPE_REGULAR
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                # resolve '..' components by popping the previous segment
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            try:
                parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            except Target_File.DoesNotExist:
                parent_obj = None

            Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)
578
579    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
580        assert isinstance(build_obj, Build)
581        assert isinstance(target_obj, Target)
582
583        errormsg = []
584        for p in packagedict:
585            # Search name swtiches round the installed name vs package name
586            # by default installed name == package name
587            searchname = p
588            if p not in pkgpnmap:
589                logger.warning("Image packages list contains %p, but is"
590                               " missing from all packages list where the"
591                               " metadata comes from. Skipping...", p)
592                continue
593
594            if 'OPKGN' in pkgpnmap[p].keys():
595                searchname = pkgpnmap[p]['OPKGN']
596
597            built_recipe = recipes[pkgpnmap[p]['PN']]
598
599            if built_package:
600                packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
601                recipe = built_recipe
602            else:
603                packagedict[p]['object'], created = \
604                        CustomImagePackage.objects.get_or_create(name=searchname)
605                # Clear the Package_Dependency objects as we're going to update
606                # the CustomImagePackage with the latest dependency information
607                packagedict[p]['object'].package_dependencies_target.all().delete()
608                packagedict[p]['object'].package_dependencies_source.all().delete()
609                try:
610                    recipe = self._cached_get(
611                        Recipe,
612                        name=built_recipe.name,
613                        layer_version__build=None,
614                        layer_version__release=
615                        built_recipe.layer_version.release,
616                        file_path=built_recipe.file_path,
617                        version=built_recipe.version
618                    )
619                except (Recipe.DoesNotExist,
620                        Recipe.MultipleObjectsReturned) as e:
621                    logger.info("We did not find one recipe for the"
622                                "configuration data package %s %s" % (p, e))
623                    continue
624
625            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
626                # fill in everything we can from the runtime-reverse package data
627                try:
628                    packagedict[p]['object'].recipe = recipe
629                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
630                    packagedict[p]['object'].installed_name = p
631                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
632                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
633                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
634                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
635                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
636                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
637
638                # no files recorded for this package, so save files info
639                    packagefile_objects = []
640                    for targetpath in pkgpnmap[p]['FILES_INFO']:
641                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
642                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
643                            path = targetpath,
644                            size = targetfilesize))
645                    if packagefile_objects:
646                        Package_File.objects.bulk_create(packagefile_objects)
647                except KeyError as e:
648                    errormsg.append("  stpi: Key error, package %s key %s \n" % (p, e))
649
650            # save disk installed size
651            packagedict[p]['object'].installed_size = packagedict[p]['size']
652            packagedict[p]['object'].save()
653
654            if built_package:
655                Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
656
657        packagedeps_objs = []
658        pattern_so = re.compile(r'.*\.so(\.\d*)?$')
659        pattern_lib = re.compile(r'.*\-suffix(\d*)?$')
660        pattern_ko = re.compile(r'^kernel-module-.*')
661        for p in packagedict:
662            for (px,deptype) in packagedict[p]['depends']:
663                if deptype == 'depends':
664                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
665                elif deptype == 'recommends':
666                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS
667
668                try:
669                    # Skip known non-package objects like libraries and kernel modules
670                    if pattern_so.match(px) or pattern_lib.match(px):
671                        logger.info("Toaster does not add library file dependencies to packages (%s,%s)", p, px)
672                        continue
673                    if pattern_ko.match(px):
674                        logger.info("Toaster does not add kernel module dependencies to packages (%s,%s)", p, px)
675                        continue
676                    packagedeps_objs.append(Package_Dependency(
677                        package = packagedict[p]['object'],
678                        depends_on = packagedict[px]['object'],
679                        dep_type = tdeptype,
680                        target = target_obj))
681                except KeyError as e:
682                    logger.warning("Could not add dependency to the package %s "
683                                   "because %s is an unknown package", p, px)
684
685        if packagedeps_objs:
686            Package_Dependency.objects.bulk_create(packagedeps_objs)
687        else:
688            logger.info("No package dependencies created")
689
690        if errormsg:
691            logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))
692
693    def save_target_image_file_information(self, target_obj, file_name, file_size):
694        Target_Image_File.objects.create(target=target_obj,
695            file_name=file_name, file_size=file_size)
696
697    def save_target_kernel_file(self, target_obj, file_name, file_size):
698        """
699        Save kernel file (bzImage, modules*) information for a Target target_obj.
700        """
701        TargetKernelFile.objects.create(target=target_obj,
702            file_name=file_name, file_size=file_size)
703
704    def save_target_sdk_file(self, target_obj, file_name, file_size):
705        """
706        Save SDK artifacts to the database, associating them with a
707        Target object.
708        """
709        TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
710            file_size=file_size)
711
712    def create_logmessage(self, log_information):
713        assert 'build' in log_information
714        assert 'level' in log_information
715        assert 'message' in log_information
716
717        log_object = LogMessage.objects.create(
718                        build = log_information['build'],
719                        level = log_information['level'],
720                        message = log_information['message'])
721
722        for v in vars(log_object):
723            if v in log_information.keys():
724                vars(log_object)[v] = log_information[v]
725
726        return log_object.save()
727
728
729    def save_build_package_information(self, build_obj, package_info, recipes,
730                                       built_package):
731        # assert isinstance(build_obj, Build)
732
733        if not 'PN' in package_info.keys():
734            # no package data to save (e.g. 'OPKGN'="lib64-*"|"lib32-*")
735            return None
736
737        # create and save the object
738        pname = package_info['PKG']
739        built_recipe = recipes[package_info['PN']]
740        if 'OPKGN' in package_info.keys():
741            pname = package_info['OPKGN']
742
743        if built_package:
744            bp_object, _ = Package.objects.get_or_create( build = build_obj,
745                                                         name = pname )
746            recipe = built_recipe
747        else:
748            bp_object, created = \
749                    CustomImagePackage.objects.get_or_create(name=pname)
750            try:
751                recipe = self._cached_get(Recipe,
752                                          name=built_recipe.name,
753                                          layer_version__build=None,
754                                          file_path=built_recipe.file_path,
755                                          version=built_recipe.version)
756
757            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
758                logger.debug("We did not find one recipe for the configuration"
759                             "data package %s" % pname)
760                return
761
762        bp_object.installed_name = package_info['PKG']
763        bp_object.recipe = recipe
764        bp_object.version = package_info['PKGV']
765        bp_object.revision = package_info['PKGR']
766        bp_object.summary = package_info['SUMMARY']
767        bp_object.description = package_info['DESCRIPTION']
768        bp_object.size = int(package_info['PKGSIZE'])
769        bp_object.section = package_info['SECTION']
770        bp_object.license = package_info['LICENSE']
771        bp_object.save()
772
773        # save any attached file information
774        packagefile_objects = []
775        for path in package_info['FILES_INFO']:
776            packagefile_objects.append(Package_File( package = bp_object,
777                                        path = path,
778                                        size = package_info['FILES_INFO'][path] ))
779        if packagefile_objects:
780            Package_File.objects.bulk_create(packagefile_objects)
781
782        def _po_byname(p):
783            if built_package:
784                pkg, created = Package.objects.get_or_create(build=build_obj,
785                                                             name=p)
786            else:
787                pkg, created = CustomImagePackage.objects.get_or_create(name=p)
788
789            if created:
790                pkg.size = -1
791                pkg.save()
792            return pkg
793
794        packagedeps_objs = []
795        # save soft dependency information
796        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
797            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
798                packagedeps_objs.append(Package_Dependency(  package = bp_object,
799                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
800        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
801            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
802                packagedeps_objs.append(Package_Dependency(  package = bp_object,
803                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
804        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
805            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
806                packagedeps_objs.append(Package_Dependency(  package = bp_object,
807                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
808        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
809            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
810                packagedeps_objs.append(Package_Dependency(  package = bp_object,
811                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
812        if 'RREPLACES' in package_info and package_info['RREPLACES']:
813            for p in bb.utils.explode_deps(package_info['RREPLACES']):
814                packagedeps_objs.append(Package_Dependency(  package = bp_object,
815                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
816        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
817            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
818                packagedeps_objs.append(Package_Dependency(  package = bp_object,
819                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
820
821        if packagedeps_objs:
822            Package_Dependency.objects.bulk_create(packagedeps_objs)
823
824        return bp_object
825
826    def save_build_variables(self, build_obj, vardump):
827        assert isinstance(build_obj, Build)
828
829        for k in vardump:
830            desc = vardump[k]['doc']
831            if desc is None:
832                var_words = [word for word in k.split('_')]
833                root_var = "_".join([word for word in var_words if word.isupper()])
834                if root_var and root_var != k and root_var in vardump:
835                    desc = vardump[root_var]['doc']
836            if desc is None:
837                desc = ''
838            if desc:
839                HelpText.objects.get_or_create(build=build_obj,
840                                               area=HelpText.VARIABLE,
841                                               key=k, text=desc)
842            if not bool(vardump[k]['func']):
843                value = vardump[k]['v']
844                if value is None:
845                    value = ''
846                variable_obj = Variable.objects.create( build = build_obj,
847                    variable_name = k,
848                    variable_value = value,
849                    description = desc)
850
851                varhist_objects = []
852                for vh in vardump[k]['history']:
853                    if not 'documentation.conf' in vh['file']:
854                        varhist_objects.append(VariableHistory( variable = variable_obj,
855                                file_name = vh['file'],
856                                line_number = vh['line'],
857                                operation = vh['op']))
858                if varhist_objects:
859                    VariableHistory.objects.bulk_create(varhist_objects)
860
861
class MockEvent(object):
    """ This object is used to create event, for which normal event-processing methods can
        be used, out of data that is not coming via an actual event
    """
    # the attribute names toasterui's event handlers read off real events
    _EVENT_ATTRS = ('msg', 'levelno', 'taskname', 'taskhash',
                    'pathname', 'lineno')

    def __init__(self):
        # start every mocked attribute off as None; callers fill in
        # whichever fields the consuming code needs
        for attr in self._EVENT_ATTRS:
            setattr(self, attr, None)

    def getMessage(self):
        """
        Simulate LogRecord message return
        """
        return self.msg
879
880
881class BuildInfoHelper(object):
882    """ This class gathers the build information from the server and sends it
883        towards the ORM wrapper for storing in the database
884        It is instantiated once per build
885        Keeps in memory all data that needs matching before writing it to the database
886    """
887
888    # tasks which produce image files; note we include '', as we set
889    # the task for a target to '' (i.e. 'build') if no target is
890    # explicitly defined
891    IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']
892
893    # pylint: disable=protected-access
894    # the code will look into the protected variables of the event; no easy way around this
895    # pylint: disable=bad-continuation
896    # we do not follow the python conventions for continuation indentation due to long lines here
897
898    def __init__(self, server, has_build_history = False, brbe = None):
899        self.internal_state = {}
900        self.internal_state['taskdata'] = {}
901        self.internal_state['targets'] = []
902        self.task_order = 0
903        self.autocommit_step = 1
904        self.server = server
905        self.orm_wrapper = ORMWrapper()
906        self.has_build_history = has_build_history
907        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
908
909        # this is set for Toaster-triggered builds by localhostbecontroller
910        # via toasterui
911        self.brbe = brbe
912
913        self.project = None
914
915        logger.debug("buildinfohelper: Build info helper inited %s" % vars(self))
916
917
918    ###################
919    ## methods to convert event/external info into objects that the ORM layer uses
920
921    def _ensure_build(self):
922        """
923        Ensure the current build object exists and is up to date with
924        data on the bitbake server
925        """
926        if not 'build' in self.internal_state or not self.internal_state['build']:
927            # create the Build object
928            self.internal_state['build'] = \
929                self.orm_wrapper.get_or_create_build_object(self.brbe)
930
931        build = self.internal_state['build']
932
933        # update missing fields on the Build object with found data
934        build_info = {}
935
936        # set to True if at least one field is going to be set
937        changed = False
938
939        if not build.build_name:
940            build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
941
942            # only reset the build name if the one on the server is actually
943            # a valid value for the build_name field
944            if build_name is not None:
945                build_info['build_name'] = build_name
946                changed = True
947
948        if not build.machine:
949            build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
950            changed = True
951
952        if not build.distro:
953            build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
954            changed = True
955
956        if not build.distro_version:
957            build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
958            changed = True
959
960        if not build.bitbake_version:
961            build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
962            changed = True
963
964        if changed:
965            self.orm_wrapper.update_build(self.internal_state['build'], build_info)
966
967    def _get_task_information(self, event, recipe):
968        assert 'taskname' in vars(event)
969        self._ensure_build()
970
971        task_information = {}
972        task_information['build'] = self.internal_state['build']
973        task_information['outcome'] = Task.OUTCOME_NA
974        task_information['recipe'] = recipe
975        task_information['task_name'] = event.taskname
976        try:
977            # some tasks don't come with a hash. and that's ok
978            task_information['sstate_checksum'] = event.taskhash
979        except AttributeError:
980            pass
981        return task_information
982
983    def _get_layer_version_for_dependency(self, pathRE):
984        """ Returns the layer in the toaster db that has a full regex
985        match to the pathRE. pathRE - the layer path passed as a regex in the
986        event. It is created in cooker.py as a collection for the layer
987        priorities.
988        """
989        self._ensure_build()
990
991        def _sort_longest_path(layer_version):
992            assert isinstance(layer_version, Layer_Version)
993            return len(layer_version.local_path)
994
995        # Our paths don't append a trailing slash
996        if pathRE.endswith("/"):
997            pathRE = pathRE[:-1]
998
999        p = re.compile(pathRE)
1000        path=re.sub(r'[$^]',r'',pathRE)
1001        # Heuristics: we always match recipe to the deepest layer path in
1002        # the discovered layers
1003        for lvo in sorted(self.orm_wrapper.layer_version_objects,
1004                          reverse=True, key=_sort_longest_path):
1005            if p.fullmatch(os.path.abspath(lvo.local_path)):
1006                return lvo
1007            if lvo.layer.local_source_dir:
1008                if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)):
1009                    return lvo
1010            if 0 == path.find(lvo.local_path):
1011                # sub-layer path inside existing layer
1012                return lvo
1013
1014        # if we get here, we didn't read layers correctly;
1015        # dump whatever information we have on the error log
1016        logger.warning("Could not match layer dependency for path %s : %s",
1017                       pathRE,
1018                       self.orm_wrapper.layer_version_objects)
1019        return None
1020
1021    def _get_layer_version_for_path(self, path):
1022        self._ensure_build()
1023
1024        def _slkey_interactive(layer_version):
1025            assert isinstance(layer_version, Layer_Version)
1026            return len(layer_version.local_path)
1027
1028        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
1029        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
1030            # we can match to the recipe file path
1031            if path.startswith(lvo.local_path):
1032                return lvo
1033            if lvo.layer.local_source_dir and \
1034               path.startswith(lvo.layer.local_source_dir):
1035                return lvo
1036
1037        #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
1038        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
1039
1040        #mockup the new layer
1041        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
1042        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
1043
1044        # append it so we don't run into this error again and again
1045        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)
1046
1047        return unknown_layer_version_obj
1048
1049    def _get_recipe_information_from_taskfile(self, taskfile):
1050        localfilepath = taskfile.split(":")[-1]
1051        filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
1052        layer_version_obj = self._get_layer_version_for_path(localfilepath)
1053
1054
1055
1056        recipe_info = {}
1057        recipe_info['layer_version'] = layer_version_obj
1058        recipe_info['file_path'] = localfilepath
1059        recipe_info['pathflags'] = filepath_flags
1060
1061        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
1062            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
1063        else:
1064            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
1065
1066        return recipe_info
1067
1068
1069    ################################
1070    ## external available methods to store information
1071    @staticmethod
1072    def _get_data_from_event(event):
1073        evdata = None
1074        if '_localdata' in vars(event):
1075            evdata = event._localdata
1076        elif 'data' in vars(event):
1077            evdata = event.data
1078        else:
1079            raise Exception("Event with neither _localdata or data properties")
1080        return evdata
1081
1082    def store_layer_info(self, event):
1083        layerinfos = BuildInfoHelper._get_data_from_event(event)
1084        self.internal_state['lvs'] = {}
1085        for layer in layerinfos:
1086            try:
1087                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
1088                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
1089            except NotExisting as nee:
1090                logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)
1091
    def store_started_build(self):
        """Make sure a Build object exists as soon as a build starts."""
        self._ensure_build()
1094
1095    def save_build_log_file_path(self, build_log_path):
1096        self._ensure_build()
1097
1098        if not self.internal_state['build'].cooker_log_path:
1099            data_dict = {'cooker_log_path': build_log_path}
1100            self.orm_wrapper.update_build(self.internal_state['build'], data_dict)
1101
1102    def save_build_targets(self, event):
1103        self._ensure_build()
1104
1105        # create target information
1106        assert '_pkgs' in vars(event)
1107        target_information = {}
1108        target_information['targets'] = event._pkgs
1109        target_information['build'] = self.internal_state['build']
1110
1111        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
1112
    def save_build_layers_and_variables(self):
        """
        Persist layer version information (gathered earlier by
        store_layer_info) and the full bitbake variable dump for the
        current build. Variable-history file paths are rewritten from
        absolute paths to paths relative to the build directory or the
        owning layer checkout before saving. Returns self.brbe.
        """
        self._ensure_build()

        build_obj = self.internal_state['build']

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            # consumed; no longer needed after this point
            del self.internal_state['lvs']

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            # brbe is "buildrequest:buildenvironment"; the environment's
            # build directory is the first candidate prefix
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        # longest layer paths first, so the deepest prefix wins
        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                # preserve layer name in relative path
                                vh['file']=abs_file_name[pp.rfind("/")+1:]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe
1157
1158    def set_recipes_to_parse(self, num_recipes):
1159        """
1160        Set the number of recipes which need to be parsed for this build.
1161        This is set the first time ParseStarted is received by toasterui.
1162        """
1163        self._ensure_build()
1164        self.internal_state['build'].recipes_to_parse = num_recipes
1165        self.internal_state['build'].save()
1166
1167    def set_recipes_parsed(self, num_recipes):
1168        """
1169        Set the number of recipes parsed so far for this build; this is updated
1170        each time a ParseProgress or ParseCompleted event is received by
1171        toasterui.
1172        """
1173        self._ensure_build()
1174        if num_recipes <= self.internal_state['build'].recipes_to_parse:
1175            self.internal_state['build'].recipes_parsed = num_recipes
1176            self.internal_state['build'].save()
1177
1178    def update_target_image_file(self, event):
1179        evdata = BuildInfoHelper._get_data_from_event(event)
1180
1181        for t in self.internal_state['targets']:
1182            if t.is_image:
1183                output_files = list(evdata.keys())
1184                for output in output_files:
1185                    if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
1186                        self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
1187
1188    def update_artifact_image_file(self, event):
1189        self._ensure_build()
1190        evdata = BuildInfoHelper._get_data_from_event(event)
1191        for artifact_path in evdata.keys():
1192            self.orm_wrapper.save_artifact_information(
1193                self.internal_state['build'], artifact_path,
1194                evdata[artifact_path])
1195
    def update_build_information(self, event, errors, warnings, taskfailures):
        """
        Update the final error/warning counts, failed-task list and
        outcome of the current build.
        """
        self._ensure_build()
        self.orm_wrapper.update_build_stats_and_outcome(
            self.internal_state['build'], errors, warnings, taskfailures)
1200
    def store_started_task(self, event):
        """
        Record a Task row for a runqueue task started/skipped event, and
        remember its outcome in internal_state['taskdata'] (keyed by
        "taskfile:taskname") so update_and_store_task can find it later.
        """
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname

        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            # skip reason distinguishes covered-by-another-task from
            # already-existing (prebuilt) output
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            # noexec tasks are recorded as executed=False with EMPTY outcome
            if 'noexec' in vars(event) and event.noexec:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        self.orm_wrapper.get_update_task_object(task_information)

        self.internal_state['taskdata'][identifier] = {
                        'outcome': task_information['outcome'],
                    }
1239
1240
1241    def store_tasks_stats(self, event):
1242        self._ensure_build()
1243        task_data = BuildInfoHelper._get_data_from_event(event)
1244
1245        for (task_file, task_name, task_stats, recipe_name) in task_data:
1246            build = self.internal_state['build']
1247            self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)
1248
    def update_and_store_task(self, event):
        """
        Update an existing Task row when a task finishes (completed/failed).

        The task is looked up in internal_state['taskdata'] by the
        "taskfile:taskname" identifier recorded by store_started_task; for
        bb.build.TaskBase events the taskfile may carry a virtual prefix,
        so some guessing against the recorded identifiers is performed.
        """
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname
        if not identifier in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]
                elif len(candidates) > 1 and hasattr(event,'_package'):
                    # disambiguate via the package's native/nativesdk prefix
                    if 'native-' in event._package:
                        identifier = 'native:' + identifier
                    if 'nativesdk-' in event._package:
                        identifier = 'nativesdk:' + identifier
                    candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                    if len(candidates) == 1:
                        identifier = candidates[0]

        assert identifier in self.internal_state['taskdata']
        # everything before the last ":" is the real task file path
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
        recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
        task_information = self._get_task_information(event,recipe)

        # carry over the outcome recorded when the task started
        task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']

        if 'logfile' in vars(event):
            task_information['logfile'] = event.logfile

        if '_message' in vars(event):
            task_information['message'] = event._message

        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL

        # only settle the final outcome (and drop the taskdata entry) on
        # completed/failed events when no outcome was set at start time
        if task_information['outcome'] == Task.OUTCOME_NA:
            if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
                task_information['outcome'] = Task.OUTCOME_SUCCESS
                del self.internal_state['taskdata'][identifier]

            if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
                task_information['outcome'] = Task.OUTCOME_FAILED
                del self.internal_state['taskdata'][identifier]

        # we force a sync point here, to get the progress bar to show
        if self.autocommit_step % 3 == 0:
            transaction.set_autocommit(True)
            transaction.set_autocommit(False)
        self.autocommit_step += 1

        self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1308
1309
1310    def store_missed_state_tasks(self, event):
1311        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1312
1313            # identifier = fn + taskname + "_setscene"
1314            recipe_information = self._get_recipe_information_from_taskfile(fn)
1315            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1316            mevent = MockEvent()
1317            mevent.taskname = taskname
1318            mevent.taskhash = taskhash
1319            task_information = self._get_task_information(mevent,recipe)
1320
1321            task_information['start_time'] = timezone.now()
1322            task_information['outcome'] = Task.OUTCOME_NA
1323            task_information['sstate_checksum'] = taskhash
1324            task_information['sstate_result'] = Task.SSTATE_MISS
1325            task_information['path_to_sstate_obj'] = sstatefile
1326
1327            self.orm_wrapper.get_update_task_object(task_information)
1328
1329        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1330
1331            # identifier = fn + taskname + "_setscene"
1332            recipe_information = self._get_recipe_information_from_taskfile(fn)
1333            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1334            mevent = MockEvent()
1335            mevent.taskname = taskname
1336            mevent.taskhash = taskhash
1337            task_information = self._get_task_information(mevent,recipe)
1338
1339            task_information['path_to_sstate_obj'] = sstatefile
1340
1341            self.orm_wrapper.get_update_task_object(task_information)
1342
1343
1344    def store_target_package_data(self, event):
1345        self._ensure_build()
1346
1347        # for all image targets
1348        for target in self.internal_state['targets']:
1349            if target.is_image:
1350                pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
1351                imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
1352                filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})
1353
1354                try:
1355                    self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
1356                    self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
1357                except KeyError as e:
1358                    logger.warning("KeyError in save_target_package_information"
1359                                   "%s ", e)
1360
1361                # only try to find files in the image if the task for this
1362                # target is one which produces image files; otherwise, the old
1363                # list of files in the files-in-image.txt file will be
1364                # appended to the target even if it didn't produce any images
1365                if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
1366                    try:
1367                        self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1368                    except KeyError as e:
1369                        logger.warning("KeyError in save_target_file_information"
1370                                       "%s ", e)
1371
1372
1373
    def cancel_cli_build(self):
        """
        If a build is currently underway, set its state to CANCELLED;
        note that this only gets called for command line builds which are
        interrupted, so it doesn't touch any BuildRequest objects
        """
        self._ensure_build()
        self.internal_state['build'].outcome = Build.CANCELLED
        self.internal_state['build'].save()
        # notify listeners that build state changed (orm.models.signal_runbuilds)
        signal_runbuilds()
1384
    def store_dependency_information(self, event):
        """
        Store the dependency graph from a DepTreeGenerated-style event:
        layer priorities, one Recipe object per 'pn' entry, build-time
        Recipe_Dependency rows, and Task/Task_Dependency rows for
        'tdepends'.

        Dependencies on names listed in ASSUME_PROVIDED are skipped;
        dependencies that cannot be resolved to a known recipe are
        collected into errormsg and logged as a single warning at the end.
        """
        # the event must carry a complete depgraph payload
        assert '_depgraph' in vars(event)
        assert 'layer-priorities' in event._depgraph
        assert 'pn' in event._depgraph
        assert 'tdepends' in event._depgraph

        errormsg = []

        # save layer version priorities
        if 'layer-priorities' in event._depgraph.keys():
            for lv in event._depgraph['layer-priorities']:
                # each entry is (collection, path, regex?, priority); only
                # the path and priority are used here
                (_, path, _, priority) = lv
                layer_version_obj = self._get_layer_version_for_dependency(path)
                if layer_version_obj:
                    layer_version_obj.priority = priority
                    layer_version_obj.save()

        # save recipe information
        self.internal_state['recipes'] = {}
        for pn in event._depgraph['pn']:

            # the filename may carry colon-separated flags before the real
            # recipe path; the last component is the path, the rest are
            # kept (sorted) as 'pathflags'
            file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
            pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
            layer_version_obj = self._get_layer_version_for_path(file_name)

            assert layer_version_obj is not None

            recipe_info = {}
            recipe_info['name'] = pn
            recipe_info['layer_version'] = layer_version_obj

            # optional metadata: copy only the fields present in the graph
            if 'version' in event._depgraph['pn'][pn]:
                recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

            if 'summary' in event._depgraph['pn'][pn]:
                recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

            if 'license' in event._depgraph['pn'][pn]:
                recipe_info['license'] = event._depgraph['pn'][pn]['license']

            if 'description' in event._depgraph['pn'][pn]:
                recipe_info['description'] = event._depgraph['pn'][pn]['description']

            if 'section' in event._depgraph['pn'][pn]:
                recipe_info['section'] = event._depgraph['pn'][pn]['section']

            if 'homepage' in event._depgraph['pn'][pn]:
                recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

            if 'bugtracker' in event._depgraph['pn'][pn]:
                recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

            recipe_info['file_path'] = file_name
            recipe_info['pathflags'] = pathflags

            # store file_path relative to the layer checkout; a recipe whose
            # path is not under its layer's local_path is a hard error
            if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
                recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
            else:
                raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

            recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
            recipe.is_image = False
            # a recipe inheriting image.bbclass is an image recipe; persist
            # that flag and stop scanning the inherit list
            if 'inherits' in event._depgraph['pn'][pn].keys():
                for cls in event._depgraph['pn'][pn]['inherits']:
                    if cls.endswith('/image.bbclass'):
                        recipe.is_image = True
                        recipe_info['is_image'] = True
                        # Save the is_image state to the relevant recipe objects
                        self.orm_wrapper.get_update_recipe_object(recipe_info)
                        break
            # propagate the image flag onto any Target built from this recipe
            if recipe.is_image:
                for t in self.internal_state['targets']:
                    if pn == t.target:
                        t.is_image = True
                        t.save()
            self.internal_state['recipes'][pn] = recipe

        # we'll not get recipes for key w/ values listed in ASSUME_PROVIDED

        assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()

        # save recipe dependency
        # buildtime
        recipedeps_objects = []
        for recipe in event._depgraph['depends']:
           target = self.internal_state['recipes'][recipe]
           for dep in event._depgraph['depends'][recipe]:
                if dep in assume_provided:
                    continue
                via = None
                # prefer the provider map: the dependency name may be a
                # virtual provide satisfied by another recipe, recorded
                # through a Provides row ('via')
                if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
                    deprecipe = event._depgraph['providermap'][dep][0]
                    dependency = self.internal_state['recipes'][deprecipe]
                    via = Provides.objects.get_or_create(name=dep,
                                                         recipe=dependency)[0]
                elif dep in self.internal_state['recipes']:
                    dependency = self.internal_state['recipes'][dep]
                else:
                    errormsg.append("  stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
                    continue
                recipe_dep = Recipe_Dependency(recipe=target,
                                               depends_on=dependency,
                                               via=via,
                                               dep_type=Recipe_Dependency.TYPE_DEPENDS)
                recipedeps_objects.append(recipe_dep)

        # single bulk insert instead of one query per dependency
        Recipe_Dependency.objects.bulk_create(recipedeps_objects)

        # save all task information
        def _save_a_task(taskdesc):
            # taskdesc is "<pn>.<taskname>"; pn itself may contain dots,
            # so everything before the last '.' is the recipe name
            spec = re.split(r'\.', taskdesc)
            pn = ".".join(spec[0:-1])
            taskname = spec[-1]
            # NOTE: reuses (and mutates) the shared event object to carry
            # the recipe name into _get_task_information
            e = event
            e.taskname = pn
            recipe = self.internal_state['recipes'][pn]
            task_info = self._get_task_information(e, recipe)
            task_info['task_name'] = taskname
            task_obj = self.orm_wrapper.get_update_task_object(task_info)
            return task_obj

        # create tasks
        tasks = {}
        for taskdesc in event._depgraph['tdepends']:
            tasks[taskdesc] = _save_a_task(taskdesc)

        # create dependencies between tasks
        taskdeps_objects = []
        for taskdesc in event._depgraph['tdepends']:
            target = tasks[taskdesc]
            for taskdep in event._depgraph['tdepends'][taskdesc]:
                if taskdep not in tasks:
                    # Fetch tasks info is not collected previously
                    dep = _save_a_task(taskdep)
                else:
                    dep = tasks[taskdep]
                taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
        Task_Dependency.objects.bulk_create(taskdeps_objects)

        if errormsg:
            logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg))
1526
1527
1528    def store_build_package_information(self, event):
1529        self._ensure_build()
1530
1531        package_info = BuildInfoHelper._get_data_from_event(event)
1532        self.orm_wrapper.save_build_package_information(
1533            self.internal_state['build'],
1534            package_info,
1535            self.internal_state['recipes'],
1536            built_package=True)
1537
1538        self.orm_wrapper.save_build_package_information(
1539            self.internal_state['build'],
1540            package_info,
1541            self.internal_state['recipes'],
1542            built_package=False)
1543
1544    def _store_build_done(self, errorcode):
1545        logger.info("Build exited with errorcode %d", errorcode)
1546
1547        if not self.brbe:
1548            return
1549
1550        br_id, be_id = self.brbe.split(":")
1551
1552        br = BuildRequest.objects.get(pk = br_id)
1553
1554        # if we're 'done' because we got cancelled update the build outcome
1555        if br.state == BuildRequest.REQ_CANCELLING:
1556            logger.info("Build cancelled")
1557            br.build.outcome = Build.CANCELLED
1558            br.build.save()
1559            self.internal_state['build'] = br.build
1560            errorcode = 0
1561
1562        if errorcode == 0:
1563            # request archival of the project artifacts
1564            br.state = BuildRequest.REQ_COMPLETED
1565        else:
1566            br.state = BuildRequest.REQ_FAILED
1567        br.save()
1568
1569        be = BuildEnvironment.objects.get(pk = be_id)
1570        be.lock = BuildEnvironment.LOCK_FREE
1571        be.save()
1572        signal_runbuilds()
1573
1574    def store_log_error(self, text):
1575        mockevent = MockEvent()
1576        mockevent.levelno = formatter.ERROR
1577        mockevent.msg = text
1578        mockevent.pathname = '-- None'
1579        mockevent.lineno = LogMessage.ERROR
1580        self.store_log_event(mockevent)
1581
1582    def store_log_exception(self, text, backtrace = ""):
1583        mockevent = MockEvent()
1584        mockevent.levelno = -1
1585        mockevent.msg = text
1586        mockevent.pathname = backtrace
1587        mockevent.lineno = -1
1588        self.store_log_event(mockevent)
1589
    def store_log_event(self, event,cli_backlog=True):
        """
        Persist a log event (WARNING or above) as a LogMessage for the
        current build.

        For command-line builds (no brbe) with cli_backlog=True, events are
        queued in internal_state['backlog'] instead of being written, since
        there may not yet be a Build to attach them to; once events can be
        saved, the backlog is drained recursively, newest first.
        """
        self._ensure_build()

        # only WARNING and above are stored (negative toaster-internal
        # levels also pass this check)
        if event.levelno < formatter.WARNING:
            return

        # early return for CLI builds
        if cli_backlog and self.brbe is None:
            if not 'backlog' in self.internal_state:
                self.internal_state['backlog'] = []
            self.internal_state['backlog'].append(event)
            return

        if 'backlog' in self.internal_state:
            # if we have a backlog of events, do our best to save them here
            if self.internal_state['backlog']:
                # recursive call saves the popped event (which in turn
                # drains the rest of the backlog) before this event
                tempevent = self.internal_state['backlog'].pop()
                logger.debug("buildinfohelper: Saving stored event %s "
                             % tempevent)
                self.store_log_event(tempevent,cli_backlog)
            else:
                logger.info("buildinfohelper: All events saved")
                del self.internal_state['backlog']

        # map the BitBake log level onto the LogMessage level constants
        log_information = {}
        log_information['build'] = self.internal_state['build']
        if event.levelno == formatter.CRITICAL:
            log_information['level'] = LogMessage.CRITICAL
        elif event.levelno == formatter.ERROR:
            log_information['level'] = LogMessage.ERROR
        elif event.levelno == formatter.WARNING:
            log_information['level'] = LogMessage.WARNING
        elif event.levelno == -2:   # toaster self-logging
            log_information['level'] = -2
        else:
            log_information['level'] = LogMessage.INFO

        log_information['message'] = event.getMessage()
        log_information['pathname'] = event.pathname
        log_information['lineno'] = event.lineno
        logger.info("Logging error 2: %s", log_information)

        self.orm_wrapper.create_logmessage(log_information)
1633
1634    def _get_filenames_from_image_license(self, image_license_manifest_path):
1635        """
1636        Find the FILES line in the image_license.manifest file,
1637        which has the basenames of the bzImage and modules files
1638        in this format:
1639        FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
1640        """
1641        files = []
1642        with open(image_license_manifest_path) as image_license:
1643            for line in image_license:
1644                if line.startswith('FILES'):
1645                    files_str = line.split(':')[1].strip()
1646                    files_str = re.sub(r' {2,}', ' ', files_str)
1647
1648                    # ignore lines like "FILES:" with no filenames
1649                    if files_str:
1650                        files += files_str.split(' ')
1651        return files
1652
1653    def _endswith(self, str_to_test, endings):
1654        """
1655        Returns True if str ends with one of the strings in the list
1656        endings, False otherwise
1657        """
1658        endswith = False
1659        for ending in endings:
1660            if str_to_test.endswith(ending):
1661                endswith = True
1662                break
1663        return endswith
1664
1665    def scan_task_artifacts(self, event):
1666        """
1667        The 'TaskArtifacts' event passes the manifest file content for the
1668        tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and
1669        'do_populate_sdk_ext'. The first two will be implemented later.
1670        """
1671        task_vars = BuildInfoHelper._get_data_from_event(event)
1672        task_name = task_vars['task'][task_vars['task'].find(':')+1:]
1673        task_artifacts = task_vars['artifacts']
1674
1675        if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']:
1676            targets = [target for target in self.internal_state['targets'] \
1677                if target.task == task_name[3:]]
1678            if not targets:
1679                logger.warning("scan_task_artifacts: SDK targets not found: %s\n", task_name)
1680                return
1681            for artifact_path in task_artifacts:
1682                if not os.path.isfile(artifact_path):
1683                    logger.warning("scan_task_artifacts: artifact file not found: %s\n", artifact_path)
1684                    continue
1685                for target in targets:
1686                    # don't record the file if it's already been added
1687                    # to this target
1688                    matching_files = TargetSDKFile.objects.filter(
1689                        target=target, file_name=artifact_path)
1690                    if matching_files.count() == 0:
1691                        artifact_size = os.stat(artifact_path).st_size
1692                        self.orm_wrapper.save_target_sdk_file(
1693                            target, artifact_path, artifact_size)
1694
1695    def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
1696        """
1697        Find files in deploy_dir_image whose basename starts with the
1698        string image_name and ends with one of the strings in
1699        image_file_extensions.
1700
1701        Returns a list of file dictionaries like
1702
1703        [
1704            {
1705                'path': '/path/to/image/file',
1706                'size': <file size in bytes>
1707            }
1708        ]
1709        """
1710        image_files = []
1711
1712        for dirpath, _, filenames in os.walk(deploy_dir_image):
1713            for filename in filenames:
1714                if filename.startswith(image_name) and \
1715                self._endswith(filename, image_file_extensions):
1716                    image_file_path = os.path.join(dirpath, filename)
1717                    image_file_size = os.stat(image_file_path).st_size
1718
1719                    image_files.append({
1720                        'path': image_file_path,
1721                        'size': image_file_size
1722                    })
1723
1724        return image_files
1725
1726    def scan_image_artifacts(self):
1727        """
1728        Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
1729        with a Target object in self.internal_state['targets'].
1730
1731        We have two situations to handle:
1732
1733        1. This is the first time a target + machine has been built, so
1734        add files from the DEPLOY_DIR_IMAGE to the target.
1735
1736        OR
1737
1738        2. There are no new files for the target (they were already produced by
1739        a previous build), so copy them from the most recent previous build with
1740        the same target, task and machine.
1741        """
1742        deploy_dir_image = \
1743            self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]
1744
1745        # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
1746        # any image artifacts, so we can return immediately
1747        if not deploy_dir_image:
1748            return
1749
1750        buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1751        machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1752
1753        # location of the manifest files for this build;
1754        # note that this file is only produced if an image is produced
1755        license_directory = \
1756            self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]
1757
1758        # file name extensions for image files
1759        image_file_extensions_unique = {}
1760        image_fstypes = self.server.runCommand(
1761            ['getVariable', 'IMAGE_FSTYPES'])[0]
1762        if image_fstypes is not None:
1763            image_types_str = image_fstypes.strip()
1764            image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
1765            image_file_extensions_unique = set(image_file_extensions.split(' '))
1766
1767        targets = self.internal_state['targets']
1768
1769        # filter out anything which isn't an image target
1770        image_targets = [target for target in targets if target.is_image]
1771
1772        if len(image_targets) > 0:
1773            #if there are image targets retrieve image_name
1774            image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1775            if not image_name:
1776                #When build target is an image and image_name is not found as an environment variable
1777                logger.info("IMAGE_NAME not found, extracting from bitbake command")
1778                cmd = self.server.runCommand(['getVariable','BB_CMDLINE'])[0]
1779                #filter out tokens that are command line options
1780                cmd = [token for token in cmd if not token.startswith('-')]
1781                image_name = cmd[1].split(':', 1)[0] # remove everything after : in image name
1782                logger.info("IMAGE_NAME found as : %s " % image_name)
1783
1784        for image_target in image_targets:
1785            # this is set to True if we find at least one file relating to
1786            # this target; if this remains False after the scan, we copy the
1787            # files from the most-recent Target with the same target + machine
1788            # onto this Target instead
1789            has_files = False
1790
1791            # we construct this because by the time we reach
1792            # BuildCompleted, this has reset to
1793            # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
1794            # we need to change it to
1795            # <TARGET>-<MACHINE>-<BUILDNAME>
1796            real_image_name = re.sub(r'^defaultpkgname', image_target.target,
1797                image_name)
1798
1799            image_license_manifest_path = os.path.join(
1800                license_directory,
1801                real_image_name,
1802                'image_license.manifest')
1803
1804            image_package_manifest_path = os.path.join(
1805                license_directory,
1806                real_image_name,
1807                'image_license.manifest')
1808
1809            # if image_license.manifest exists, we can read the names of
1810            # bzImage, modules etc. files for this build from it, then look for
1811            # them in the DEPLOY_DIR_IMAGE; note that this file is only produced
1812            # if an image file was produced
1813            if os.path.isfile(image_license_manifest_path):
1814                has_files = True
1815
1816                basenames = self._get_filenames_from_image_license(
1817                    image_license_manifest_path)
1818
1819                for basename in basenames:
1820                    artifact_path = os.path.join(deploy_dir_image, basename)
1821                    if not os.path.exists(artifact_path):
1822                        logger.warning("artifact %s doesn't exist, skipping" % artifact_path)
1823                        continue
1824                    artifact_size = os.stat(artifact_path).st_size
1825
1826                    # note that the artifact will only be saved against this
1827                    # build if it hasn't been already
1828                    self.orm_wrapper.save_target_kernel_file(image_target,
1829                        artifact_path, artifact_size)
1830
1831                # store the license manifest path on the target
1832                # (this file is also created any time an image file is created)
1833                license_manifest_path = os.path.join(license_directory,
1834                    real_image_name, 'license.manifest')
1835
1836                self.orm_wrapper.update_target_set_license_manifest(
1837                    image_target, license_manifest_path)
1838
1839                # store the package manifest path on the target (this file
1840                # is created any time an image file is created)
1841                package_manifest_path = os.path.join(deploy_dir_image,
1842                    real_image_name + '.rootfs.manifest')
1843
1844                if os.path.exists(package_manifest_path):
1845                    self.orm_wrapper.update_target_set_package_manifest(
1846                        image_target, package_manifest_path)
1847
1848            # scan the directory for image files relating to this build
1849            # (via real_image_name); note that we don't have to set
1850            # has_files = True, as searching for the license manifest file
1851            # will already have set it to true if at least one image file was
1852            # produced; note that the real_image_name includes BUILDNAME, which
1853            # in turn includes a timestamp; so if no files were produced for
1854            # this timestamp (i.e. the build reused existing image files already
1855            # in the directory), no files will be recorded against this target
1856            image_files = self._get_image_files(deploy_dir_image,
1857                real_image_name, image_file_extensions_unique)
1858
1859            for image_file in image_files:
1860                self.orm_wrapper.save_target_image_file_information(
1861                    image_target, image_file['path'], image_file['size'])
1862
1863            if not has_files:
1864                # copy image files and build artifacts from the
1865                # most-recently-built Target with the
1866                # same target + machine as this Target; also copy the license
1867                # manifest path, as that is not treated as an artifact and needs
1868                # to be set separately
1869                similar_target = \
1870                    self.orm_wrapper.get_similar_target_with_image_files(
1871                        image_target)
1872
1873                if similar_target:
1874                    logger.info('image artifacts for target %s cloned from ' \
1875                        'target %s' % (image_target.pk, similar_target.pk))
1876                    self.orm_wrapper.clone_image_artifacts(similar_target,
1877                        image_target)
1878
1879    def _get_sdk_targets(self):
1880        """
1881        Return targets which could generate SDK artifacts, i.e.
1882        "do_populate_sdk" and "do_populate_sdk_ext".
1883        """
1884        return [target for target in self.internal_state['targets'] \
1885            if target.task in ['populate_sdk', 'populate_sdk_ext']]
1886
    def scan_sdk_artifacts(self, event):
        """
        Note that we have to intercept an SDKArtifactInfo event from
        toaster.bbclass (via toasterui) to get hold of the SDK variables we
        need to be able to scan for files accurately: this is because
        variables like TOOLCHAIN_OUTPUTNAME have reset to None by the time
        BuildCompleted is fired by bitbake, so we have to get those values
        while the build is still in progress.

        For populate_sdk_ext, this runs twice, with two different
        TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
        files in the SDK output directory.
        """
        sdk_vars = BuildInfoHelper._get_data_from_event(event)
        toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']

        # targets which might have created SDK artifacts
        sdk_targets = self._get_sdk_targets()

        # location of SDK artifacts
        tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
        sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')

        # all files in the SDK directory (symlinks are skipped so each
        # artifact is recorded once, under its real path)
        artifacts = []
        for dir_path, _, filenames in os.walk(sdk_dir):
            for filename in filenames:
                full_path = os.path.join(dir_path, filename)
                if not os.path.islink(full_path):
                    artifacts.append(full_path)

        for sdk_target in sdk_targets:
            # find files in the SDK directory which haven't already been
            # recorded against a Target and whose basename matches
            # TOOLCHAIN_OUTPUTNAME
            for artifact_path in artifacts:
                basename = os.path.basename(artifact_path)

                toolchain_match = basename.startswith(toolchain_outputname)

                # files which match the name of the target which produced them;
                # for example,
                # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
                target_match = re.search(sdk_target.target, basename)

                # targets which produce "*-nativesdk-*" files
                is_ext_sdk_target = sdk_target.task in \
                    ['do_populate_sdk_ext', 'populate_sdk_ext']

                # SDK files which don't match the target name, i.e.
                # x86_64-nativesdk-libc.*
                # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
                is_ext_sdk_file = re.search('-nativesdk-', basename)

                # a file belongs to this target either by direct name match
                # or, for extensible SDK targets, by the nativesdk pattern
                file_from_target = (toolchain_match and target_match) or \
                    (is_ext_sdk_target and is_ext_sdk_file)

                if file_from_target:
                    # don't record the file if it's already been added to this
                    # target
                    matching_files = TargetSDKFile.objects.filter(
                        target=sdk_target, file_name=artifact_path)

                    if matching_files.count() == 0:
                        artifact_size = os.stat(artifact_path).st_size

                        self.orm_wrapper.save_target_sdk_file(
                            sdk_target, artifact_path, artifact_size)
1955
1956    def clone_required_sdk_artifacts(self):
1957        """
1958        If an SDK target doesn't have any SDK artifacts, this means that
1959        the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
1960        in turn means that the targets of this build didn't generate any new
1961        artifacts.
1962
1963        In this case, clone SDK artifacts for targets in the current build
1964        from existing targets for this build.
1965        """
1966        sdk_targets = self._get_sdk_targets()
1967        for sdk_target in sdk_targets:
1968            # only clone for SDK targets which have no TargetSDKFiles yet
1969            if sdk_target.targetsdkfile_set.all().count() == 0:
1970                similar_target = \
1971                    self.orm_wrapper.get_similar_target_with_sdk_files(
1972                        sdk_target)
1973                if similar_target:
1974                    logger.info('SDK artifacts for target %s cloned from ' \
1975                        'target %s' % (sdk_target.pk, similar_target.pk))
1976                    self.orm_wrapper.clone_sdk_artifacts(similar_target,
1977                        sdk_target)
1978
1979    def close(self, errorcode):
1980        self._store_build_done(errorcode)
1981
1982        if 'backlog' in self.internal_state:
1983            # we save missed events in the database for the current build
1984            tempevent = self.internal_state['backlog'].pop()
1985            # Do not skip command line build events
1986            self.store_log_event(tempevent,False)
1987
1988
1989        # unset the brbe; this is to prevent subsequent command-line builds
1990        # being incorrectly attached to the previous Toaster-triggered build;
1991        # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
1992        self.brbe = None
1993
1994        # unset the internal Build object to prevent it being reused for the
1995        # next build
1996        self.internal_state['build'] = None
1997