def _import_interaction(self, interaction):
    """Import a single client interaction into the reporting database.

    ``interaction`` is a dict with keys ``hostname`` (client name),
    ``stats`` (an XML statistics document as a string) and ``metadata``
    (a dict carrying at least ``server``, ``groups`` and optionally
    ``profile`` and ``bundles``).

    Creates the Client/Group/Bundle rows on demand (with caching),
    records the Interaction itself, attaches per-entry updates in
    batches, and stores performance metrics.  Duplicate interactions
    (same client + timestamp) are skipped with a warning.
    """
    hostname = interaction['hostname']
    stats = etree.fromstring(interaction['stats'])
    metadata = interaction['metadata']
    server = metadata['server']

    # Resolve (or lazily create) the Client row, memoized in the cache.
    client = cache.get(hostname)
    if not client:
        client, created = Client.objects.get_or_create(name=hostname)
        if created:
            self.logger.debug("Client %s added to the db" % hostname)
        cache.set(hostname, client)

    timestamp = datetime(*strptime(stats.get('time'))[0:6])
    # exists() avoids fetching every matching row just to test for one.
    if Interaction.objects.filter(client=client,
                                  timestamp=timestamp).exists():
        # warning() is the documented spelling; warn() is deprecated.
        self.logger.warning("Interaction for %s at %s already exists" %
                            (hostname, timestamp))
        return

    if 'profile' in metadata:
        profile, created = \
            Group.objects.get_or_create(name=metadata['profile'])
    else:
        profile = None

    # Only flags we know about are honored; anything else is ignored.
    flags = {'dry_run': False, 'only_important': False}
    for flag in stats.findall('./Flags/Flag'):
        value = flag.get('value', default='false').lower() == 'true'
        name = flag.get('name')
        if name in flags:
            flags[name] = value

    inter = Interaction(client=client,
                        timestamp=timestamp,
                        state=stats.get('state', default="unknown"),
                        repo_rev_code=stats.get('revision',
                                                default="unknown"),
                        good_count=stats.get('good', default="0"),
                        total_count=stats.get('total', default="0"),
                        server=server,
                        profile=profile,
                        **flags)
    inter.save()
    self.logger.debug("Interaction for %s at %s inserted into the db" %
                      (client.id, timestamp))

    # FIXME - this should be more efficient
    for group_name in metadata['groups']:
        group = cache.get("GROUP_" + group_name)
        if not group:
            group, created = Group.objects.get_or_create(name=group_name)
            if created:
                self.logger.debug("Added group %s" % group)
            cache.set("GROUP_" + group_name, group)
        inter.groups.add(group)

    for bundle_name in metadata.get('bundles', []):
        bundle = cache.get("BUNDLE_" + bundle_name)
        if not bundle:
            bundle, created = \
                Bundle.objects.get_or_create(name=bundle_name)
            if created:
                self.logger.debug("Added bundle %s" % bundle)
            cache.set("BUNDLE_" + bundle_name, bundle)
        inter.bundles.add(bundle)
    inter.save()

    # Walk the Bad/Extra/Modified entry lists, counting each state and
    # collecting the per-type update objects to attach afterwards.
    counter_fields = {TYPE_BAD: 0, TYPE_MODIFIED: 0, TYPE_EXTRA: 0}
    pattern = [('Bad/*', TYPE_BAD),
               ('Extra/*', TYPE_EXTRA),
               ('Modified/*', TYPE_MODIFIED)]
    updates = dict([(etype, []) for etype in Interaction.entry_types])
    for (xpath, state) in pattern:
        for entry in stats.findall(xpath):
            counter_fields[state] += 1

            # Server-side failures get a FailureEntry instead of a
            # type-specific entry record.
            failure = entry.get('failure', '')
            if failure:
                act_dict = dict(name=entry.get("name"),
                                entry_type=entry.tag,
                                message=failure)
                newact = FailureEntry.entry_get_or_create(act_dict)
                updates['failures'].append(newact)
                continue

            # Dispatch to _import_<Tag>, falling back to _import_unknown
            # for entry tags we have no specific handler for.
            updatetype = entry.tag.lower() + "s"
            update = getattr(self, "_import_%s" % entry.tag,
                             self._import_unknown)(entry, state)
            if update is not None:
                updates[updatetype].append(update)

    inter.bad_count = counter_fields[TYPE_BAD]
    inter.modified_count = counter_fields[TYPE_MODIFIED]
    inter.extra_count = counter_fields[TYPE_EXTRA]
    inter.save()

    # Attach the collected updates in batches of 100 so sqlite's
    # statement/parameter limits are not exceeded.
    for entry_type in updates:
        ents = updates[entry_type]
        for start in range(0, len(ents), 100):
            getattr(inter, entry_type).add(*ents[start:start + 100])

    # performance metrics
    for times in stats.findall('OpStamps'):
        for metric, value in list(times.items()):
            Performance(interaction=inter, metric=metric,
                        value=value).save()