Example #1
    def __upload_decisions(self, job):
        uploaded_map = {}
        decisions_data = self.__read_json_file('{}.json'.format(Decision.__name__))
        if not decisions_data:
            # The job is without reports
            return

        # Upload decisions
        for decision in decisions_data:
            if 'id' not in decision or not isinstance(decision['id'], int):
                raise exceptions.ValidationError({'decision': _('Decision data is corrupted')})
            serializer = DownloadDecisionSerializer(data=decision)
            serializer.is_valid(raise_exception=True)
            uploaded_map[decision['id']] = serializer.save(
                job=job, operator=self._user, status=DECISION_STATUS[0][0]
            ).id
            self._final_statuses[uploaded_map[decision['id']]] = serializer.validated_data['status']

        # Upload decision cache
        cache_data_list = self.__read_json_file('{}.json'.format(DecisionCache.__name__))
        new_cache_objects = []
        for dec_cache in cache_data_list:
            if 'decision' not in dec_cache or not uploaded_map.get(dec_cache['decision']):
                raise exceptions.ValidationError({'decision': _('Decision data is corrupted')})
            serializer = DecisionCacheSerializer(data=dec_cache)
            serializer.is_valid(raise_exception=True)
            new_cache_objects.append(DecisionCache(
                decision_id=uploaded_map[dec_cache['decision']], **serializer.validated_data
            ))
        DecisionCache.objects.bulk_create(new_cache_objects)

        return uploaded_map
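
Both this example and Example #4 call a private __read_json_file helper that is not shown. The sketch below is purely hypothetical: it assumes the helper parses a JSON file from the directory the upload archive was extracted into and returns None when the file is absent; the function name, the root_dir argument, and that behaviour are assumptions, not the project's actual code.

# Hypothetical sketch only: the real __read_json_file is not part of this
# example. It is assumed to parse a JSON file from the extracted upload
# directory and to return None when the file does not exist.
import json
import os


def read_json_file(root_dir, file_name):
    path = os.path.join(root_dir, file_name)
    if not os.path.isfile(path):
        return None  # e.g. a job archive without reports
    with open(path, encoding='utf-8') as fp:
        return json.load(fp)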
Example #2
    def __recalc(self):
        # Aggregate report statistics per (decision, component) pair
        cache_data = {}
        for report in ReportComponent.objects.filter(decision__in=self._decisions):
            key = (report.decision_id, report.component)
            if key not in cache_data:
                cache_data[key] = {
                    'total': 0,
                    'finished': 0,
                    'cpu_time': 0,
                    'wall_time': 0,
                    'memory': 0
                }
            data = cache_data[key]
            data['total'] += 1
            if report.finish_date:
                data['finished'] += 1
            if report.cpu_time:
                data['cpu_time'] += report.cpu_time
            if report.wall_time:
                data['wall_time'] += report.wall_time
            if report.memory:
                data['memory'] = max(data['memory'], report.memory)

        # Rebuild the cache: drop old rows and recreate them in bulk
        DecisionCache.objects.filter(decision_id__in=self._decisions).delete()
        DecisionCache.objects.bulk_create(list(
            DecisionCache(decision_id=d_id, component=component, **obj_kwargs)
            for (d_id, component), obj_kwargs in cache_data.items()
        ))
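
For comparison, the same per-(decision, component) totals could be computed in the database rather than in Python. The sketch below is not from the source project: it assumes ReportComponent is importable as in the example, that the fields finish_date, cpu_time, wall_time and memory exist as used above, and it needs Django 2.0+ for the filter= aggregate argument. Sums over all-NULL groups come back as None and would still need a fallback to 0.

# Alternative sketch (not the project's code): push the aggregation to the DB.
# Annotation names are prefixed so they do not clash with model field names.
from django.db.models import Count, Max, Q, Sum


def aggregate_component_stats(decision_ids):
    return (ReportComponent.objects
            .filter(decision__in=decision_ids)
            .values('decision_id', 'component')
            .annotate(total_reports=Count('id'),
                      finished_reports=Count('id', filter=Q(finish_date__isnull=False)),
                      total_cpu_time=Sum('cpu_time'),
                      total_wall_time=Sum('wall_time'),
                      max_memory=Max('memory')))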
Example #3
    def __update_decision_cache(self, component, **kwargs):
        # Lock the existing cache row (select_for_update() only holds the lock
        # inside a transaction) or start a new one for this decision/component
        try:
            cache_obj = DecisionCache.objects.select_for_update().get(
                decision=self.decision, component=component)
        except DecisionCache.DoesNotExist:
            cache_obj = DecisionCache(decision=self.decision, component=component)

        # Accumulate resource usage and progress counters
        if kwargs.get('cpu_time'):
            cache_obj.cpu_time += kwargs['cpu_time']
        if kwargs.get('wall_time'):
            cache_obj.wall_time += kwargs['wall_time']
        if kwargs.get('memory'):
            cache_obj.memory = max(cache_obj.memory, kwargs['memory'])
        if kwargs.get('started'):
            cache_obj.total += 1
        if kwargs.get('finished'):
            cache_obj.finished += 1
        cache_obj.save()
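
Note that select_for_update() only takes a row lock while a transaction is open; evaluated in autocommit mode it raises TransactionManagementError. A minimal, hypothetical call-site sketch follows; the function name and arguments are invented for illustration, and the DecisionCache import is assumed to come from the project's models.

# Hypothetical call-site sketch: wrap the locking read plus update in a
# transaction so select_for_update() actually holds the row lock.
from django.db import transaction


def bump_finished(decision, component):
    with transaction.atomic():
        cache_obj = DecisionCache.objects.select_for_update().get(
            decision=decision, component=component)
        cache_obj.finished += 1
        cache_obj.save()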
Example #4
    def __upload_decisions(self):
        decisions_data = self.__read_json_file('{}.json'.format(
            Decision.__name__))
        if not decisions_data:
            # The job is without reports
            return

        # Upload decisions
        for decision in decisions_data:
            if 'id' not in decision or not isinstance(decision['id'], int):
                raise exceptions.ValidationError(
                    {'decision': _('Decision data is corrupted')})
            serializer = DownloadDecisionSerializer(data=decision)
            serializer.is_valid(raise_exception=True)
            decision_obj = serializer.save(job=self.job,
                                           operator=self._upload_obj.author,
                                           status=DECISION_STATUS[0][0])
            self._decisions[decision['id']] = decision_obj.id
            self._identifiers_in_use[decision_obj.id] = set()
            self._final_statuses[decision_obj.id] = serializer.validated_data['status']

        if not self._decisions:
            # The job does not have decisions
            return

        # Upload decision cache
        cache_data_list = self.__read_json_file('{}.json'.format(
            DecisionCache.__name__))
        if not cache_data_list:
            # All decisions should have cache
            raise exceptions.ValidationError(
                {'decision': _('Decision data is corrupted')})

        new_cache_objects = []
        for dec_cache in cache_data_list:
            if 'decision' not in dec_cache or not self._decisions.get(
                    dec_cache['decision']):
                raise exceptions.ValidationError(
                    {'decision': _('Decision data is corrupted')})
            serializer = DecisionCacheSerializer(data=dec_cache)
            serializer.is_valid(raise_exception=True)
            new_cache_objects.append(
                DecisionCache(
                    decision_id=self._decisions[dec_cache['decision']],
                    **serializer.validated_data))
        DecisionCache.objects.bulk_create(new_cache_objects)
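
All four examples assume a DecisionCache model with per-component counters and resource fields. The sketch below is only inferred from the attributes used above (decision, component, total, finished, cpu_time, wall_time, memory); the field types, defaults, related model name and uniqueness constraint are assumptions, not the project's actual definition.

# Hypothetical model sketch inferred from the examples; the real DecisionCache
# definition may use different field types, defaults or constraints.
from django.db import models


class DecisionCache(models.Model):
    decision = models.ForeignKey('Decision', on_delete=models.CASCADE)
    component = models.CharField(max_length=128)
    total = models.PositiveIntegerField(default=0)
    finished = models.PositiveIntegerField(default=0)
    cpu_time = models.BigIntegerField(default=0)
    wall_time = models.BigIntegerField(default=0)
    memory = models.BigIntegerField(default=0)

    class Meta:
        unique_together = [('decision', 'component')]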