class KleverCoreArchiveGen:
    """Streaming generator of the decision files archive consumed by Klever Core.

    Iterating the instance yields compressed chunks of a ZIP that contains
    every decision file under a top-level ``root/`` directory.
    """

    def __init__(self, decision):
        self.decision = decision
        # Archive name exposed to the consumer of this generator.
        self.arcname = 'VJ__{}.zip'.format(decision.identifier)
        self.stream = ZipStream()

    def __iter__(self):
        files_qs = FileSystem.objects.filter(decision=self.decision).select_related('file')
        for fs_item in files_qs:
            entry_name = '/'.join(['root', fs_item.name])
            source_path = '/'.join([settings.MEDIA_ROOT, fs_item.file.file.name])
            yield from self.stream.compress_file(source_path, entry_name)
        # Final chunk: ZIP central directory / stream terminator.
        yield self.stream.close_stream()
class FilesForCompetitionArchive:
    """Streams a ZIP ('svcomp.zip') of verifier input archives for a decision.

    Entries are grouped as
    ``<Safes|Unsafes|Unknowns>/<type>__<requirement>__<fragment>.zip``.
    NOTE(review): a second class with this same name is defined later in this
    file (ReportRoot-based); that later definition shadows this one — confirm
    which version is intended to be used.
    """

    # Attribute names whose values are used to build entry names in the ZIP.
    obj_attr = 'Program fragment'
    requirement_attr = 'Requirements specification'

    def __init__(self, decision, filters):
        self.decision = decision
        self._attrs = self.__get_attrs()
        self._archives = self.__get_archives()
        self._archives_to_upload = []
        self.__get_archives_to_upload(filters)
        self.stream = ZipStream()
        self.name = 'svcomp.zip'

    def __iter__(self):
        """Yield compressed chunks of the resulting ZIP stream."""
        cnt = 0
        names_in_use = set()
        for arch_path, name_pattern in self._archives_to_upload:
            if name_pattern in names_in_use:
                # Duplicate base name: disambiguate with a global counter, so
                # de-duplicated names can never collide with each other either.
                cnt += 1
                arch_name = '%s_%s.zip' % (name_pattern, cnt)
            else:
                arch_name = '%s.zip' % name_pattern
                names_in_use.add(name_pattern)
            for data in self.stream.compress_file(arch_path, arch_name):
                yield data
        yield self.stream.close_stream()

    def __get_archives(self):
        # Map verification report id -> filesystem path of its verifier files archive.
        archives = {}
        for report in ReportComponent.objects.filter(decision=self.decision, verification=True)\
                .exclude(verifier_files='').only('id', 'verifier_files'):
            archives[report.id] = report.verifier_files.path
        return archives

    def __get_attrs(self):
        # Select attributes for all safes, unsafes and unknowns.
        # exclude() ANDs its conditions, so only reports that are neither
        # safe, unsafe nor unknown (pure component reports) are dropped.
        attrs = {}
        for report_id, a_name, a_value in ReportAttr.objects\
                .filter(report__decision=self.decision, name__in=[self.obj_attr, self.requirement_attr]) \
                .exclude(report__reportunsafe=None, report__reportsafe=None, report__reportunknown=None) \
                .values_list('report_id', 'name', 'value'):
            if report_id not in attrs:
                attrs[report_id] = {}
            attrs[report_id][a_name] = a_value
        return attrs

    def __add_archive(self, r_type, r_id, p_id):
        # Queue the parent's verifier archive, but only when both naming
        # attributes are known for the leaf report; otherwise skip silently.
        if p_id in self._archives and r_id in self._attrs \
                and self.obj_attr in self._attrs[r_id] \
                and self.requirement_attr in self._attrs[r_id]:
            # Sanitize attribute values so they are safe as ZIP entry names.
            ver_obj = self._attrs[r_id][self.obj_attr].replace('~', 'HOME').replace('/', '---')
            ver_requirement = self._attrs[r_id][self.requirement_attr].replace(':', '-')
            dirname = 'Unknowns' if r_type == 'f' else 'Unsafes' if r_type == 'u' else 'Safes'
            self._archives_to_upload.append(
                (self._archives[p_id], '{0}/{1}__{2}__{3}'.format(dirname, r_type,
                                                                  ver_requirement, ver_obj))
            )

    def __get_archives_to_upload(self, filters):
        # Only leaf reports whose parent is a verification report are considered.
        common_filters = {'decision': self.decision, 'parent__reportcomponent__verification': True}
        if filters.get('safes'):
            for r_id, p_id in ReportSafe.objects.filter(**common_filters).values_list('id', 'parent_id'):
                self.__add_archive('s', r_id, p_id)
        if filters.get('unsafes'):
            for r_id, p_id in ReportUnsafe.objects.filter(**common_filters).values_list('id', 'parent_id'):
                self.__add_archive('u', r_id, p_id)
        # 'problems' takes precedence over 'unknowns' (elif below).
        if filters.get('problems'):
            for problem_data in filters['problems']:
                if problem_data.get('component') and problem_data.get('problem'):
                    unknowns_qs = ReportUnknown.objects.filter(
                        markreport_set__problem=problem_data['problem'],
                        component=problem_data['component'], **common_filters
                    )
                else:
                    # No specific component/problem: take unmarked unknowns.
                    unknowns_qs = ReportUnknown.objects.filter(cache__marks_total=0, **common_filters)
                for r_id, p_id in unknowns_qs.values_list('id', 'parent_id'):
                    self.__add_archive('f', r_id, p_id)
        elif filters.get('unknowns'):
            for r_id, p_id in ReportUnknown.objects.filter(**common_filters).values_list('id', 'parent_id'):
                self.__add_archive('f', r_id, p_id)
class JobArchiveGenerator:
    """Streams a ZIP with the full downloadable dump of a job.

    The archive contains JSON dumps (job, decisions, decision cache, original
    sources, all report kinds, attributes, coverage) followed by every file
    those dumps reference. If ``decisions_ids`` is given, only those decisions
    are exported; otherwise all decisions of the job are taken.
    """

    def __init__(self, job, decisions_ids=None):
        self.job = job
        self._decisions_ids = list(map(
            int, decisions_ids)) if decisions_ids else None
        self.name = 'Job-{}.zip'.format(self.job.identifier)
        # Set of (filesystem path, archive name) pairs collected while the
        # JSON dumps are generated; compressed at the end of __iter__.
        self._arch_files = set()
        self.stream = ZipStream()

    def __iter__(self):
        # Job data
        yield from self.stream.compress_string('job.json', self.__get_job_data())
        yield from self.stream.compress_string(
            '{}.json'.format(Decision.__name__), self.__add_decisions_data())
        yield from self.stream.compress_string(
            '{}.json'.format(DecisionCache.__name__), self.__get_decision_cache())
        yield from self.stream.compress_string(
            '{}.json'.format(OriginalSources.__name__), self.__get_original_src())
        yield from self.stream.compress_string(
            '{}.json'.format(ReportComponent.__name__), self.__get_reports_data())
        yield from self.stream.compress_string(
            '{}.json'.format(ReportSafe.__name__), self.__get_safes_data())
        yield from self.stream.compress_string(
            '{}.json'.format(ReportUnsafe.__name__), self.__get_unsafes_data())
        yield from self.stream.compress_string(
            '{}.json'.format(ReportUnknown.__name__), self.__get_unknowns_data())
        yield from self.stream.compress_string(
            '{}.json'.format(ReportAttr.__name__), self.__get_attrs_data())
        yield from self.stream.compress_string(
            '{}.json'.format(CoverageArchive.__name__), self.__get_coverage_data())
        # These only populate self._arch_files; they produce no JSON dump.
        self.__add_job_files()
        self.__add_additional_sources()
        for file_path, arcname in self._arch_files:
            yield from self.stream.compress_file(file_path, arcname)
        yield self.stream.close_stream()

    @cached_property
    def _decision_filter(self):
        # Q object restricting querysets to the exported decisions.
        if self._decisions_ids:
            return Q(decision_id__in=self._decisions_ids)
        return Q(decision__job_id=self.job.id)

    def __get_job_data(self):
        return self.__get_json(DownloadJobSerializer(instance=self.job).data)

    def __add_job_files(self):
        # Deduplicate job files by content hash before registering them.
        job_files = {}
        for fs in FileSystem.objects.filter(
                decision__job=self.job).select_related('file'):
            job_files[fs.file.hash_sum] = (fs.file.file.path, fs.file.file.name)
        for f_path, arcname in job_files.values():
            self._arch_files.add((f_path, arcname))

    def __add_decisions_data(self):
        # NOTE(review): named __add_* (unlike the __get_* siblings) presumably
        # because it also registers configuration files as a side effect.
        if self._decisions_ids:
            qs_filter = Q(id__in=self._decisions_ids)
        else:
            qs_filter = Q(job_id=self.job.id)
        decisions_list = []
        for decision in Decision.objects.filter(qs_filter).select_related(
                'scheduler', 'configuration'):
            decisions_list.append(
                DownloadDecisionSerializer(instance=decision).data)
            # Each decision's configuration file goes into the archive too.
            self._arch_files.add((decision.configuration.file.path,
                                  decision.configuration.file.name))
        return self.__get_json(decisions_list)

    def __get_decision_cache(self):
        return self.__get_json(
            DecisionCacheSerializer(instance=DecisionCache.objects.filter(
                self._decision_filter), many=True).data)

    def __get_original_src(self):
        if self._decisions_ids:
            qs_filter = Q(reportcomponent__decision_id__in=self._decisions_ids)
        else:
            qs_filter = Q(reportcomponent__decision__job_id=self.job.id)
        # Map source identifier -> archive name; register archives for upload.
        sources = {}
        for src_arch in OriginalSources.objects.filter(qs_filter):
            sources[src_arch.identifier] = src_arch.archive.name
            self._arch_files.add(
                (src_arch.archive.path, src_arch.archive.name))
        return self.__get_json(sources)

    def __get_reports_data(self):
        reports = []
        # order_by('level') keeps parents before children on import.
        for report in ReportComponent.objects.filter(self._decision_filter)\
                .select_related('parent', 'computer', 'original_sources',
                                'additional_sources').order_by('level'):
            report_data = DownloadReportComponentSerializer(
                instance=report).data
            # Add report files
            if report_data['log']:
                self._arch_files.add((report.log.path, report_data['log']))
            if report_data['verifier_files']:
                self._arch_files.add((report.verifier_files.path,
                                      report_data['verifier_files']))
            reports.append(report_data)
        return self.__get_json(reports)

    def __get_safes_data(self):
        safes_queryset = ReportSafe.objects.filter(
            self._decision_filter).select_related('parent').order_by('id')
        return self.__get_json(
            DownloadReportSafeSerializer(instance=safes_queryset, many=True).data)

    def __get_unsafes_data(self):
        reports = []
        for report in ReportUnsafe.objects.filter(
                self._decision_filter).select_related('parent').order_by('id'):
            report_data = DownloadReportUnsafeSerializer(instance=report).data
            if report_data['error_trace']:
                self._arch_files.add(
                    (report.error_trace.path, report_data['error_trace']))
            reports.append(report_data)
        return self.__get_json(reports)

    def __get_unknowns_data(self):
        reports = []
        for report in ReportUnknown.objects.filter(
                self._decision_filter).select_related('parent').order_by('id'):
            report_data = DownloadReportUnknownSerializer(instance=report).data
            if report_data['problem_description']:
                self._arch_files.add((report.problem_description.path,
                                      report_data['problem_description']))
            reports.append(report_data)
        return self.__get_json(reports)

    def __get_attrs_data(self):
        if self._decisions_ids:
            qs_filter = Q(report__decision_id__in=self._decisions_ids)
        else:
            qs_filter = Q(report__decision__job_id=self.job.id)
        # Nested mapping: decision id -> report identifier -> list of attrs.
        attrs_data = {}
        for ra in ReportAttr.objects.filter(qs_filter).select_related(
                'data', 'report').order_by('id'):
            data = DownloadReportAttrSerializer(instance=ra).data
            if data['data_file']:
                self._arch_files.add((ra.data.file.path, data['data_file']))
            attrs_data.setdefault(ra.report.decision_id, {})
            attrs_data[ra.report.decision_id].setdefault(
                ra.report.identifier, [])
            attrs_data[ra.report.decision_id][ra.report.identifier].append(
                data)
        return self.__get_json(attrs_data)

    def __get_coverage_data(self):
        if self._decisions_ids:
            qs_filter = Q(report__decision_id__in=self._decisions_ids)
        else:
            qs_filter = Q(report__decision__job_id=self.job.id)
        coverage_data = []
        for carch in CoverageArchive.objects.filter(qs_filter).select_related(
                'report').order_by('id'):
            coverage_data.append({
                'decision': carch.report.decision_id,
                'report': carch.report.identifier,
                'identifier': carch.identifier,
                'archive': carch.archive.name,
                'name': carch.name
            })
            self._arch_files.add((carch.archive.path, carch.archive.name))
        return self.__get_json(coverage_data)

    def __add_additional_sources(self):
        # Register additional-sources archives for upload (no JSON dump).
        for src_arch in AdditionalSources.objects.filter(
                self._decision_filter):
            self._arch_files.add(
                (src_arch.archive.path, src_arch.archive.name))

    def __get_json(self, data):
        # sort_keys makes dumps deterministic across runs.
        return json.dumps(data, ensure_ascii=False, sort_keys=True, indent=2)
class FilesForCompetitionArchive:
    """Streams a ZIP of verification task archives for a job's report root.

    Entries are grouped as
    ``<Safes|Unsafes|Unknowns>/<type>__<requirement>__<fragment>.zip``.
    NOTE(review): this file defines FilesForCompetitionArchive twice; this
    (ReportRoot-based) definition shadows the earlier (decision-based) one —
    confirm which version is intended to survive.
    """

    # Attribute names whose values are used to build entry names in the ZIP.
    obj_attr = 'Program fragment'
    requirement_attr = 'Requirement'

    def __init__(self, job, filters):
        """Collect archives of the decided ``job`` selected by ``filters``.

        Raises BridgeException when the job has no report root (not decided).
        """
        try:
            self.root = ReportRoot.objects.get(job=job)
        except ObjectDoesNotExist:
            raise BridgeException(_('The job is not decided'))
        self._attrs = self.__get_attrs()
        self._archives = self.__get_archives()
        self.filters = filters
        self._archives_to_upload = []
        self.__get_archives_to_upload()
        self.stream = ZipStream()

    def __iter__(self):
        """Yield compressed chunks of the resulting ZIP stream."""
        cnt = 0
        names_in_use = set()
        for arch_path, name_pattern in self._archives_to_upload:
            # TODO: original extension (currently it's supposed that verification files are zip archives)
            if name_pattern in names_in_use:
                # Duplicate base name: disambiguate with a global counter, so
                # de-duplicated names never collide with each other either.
                cnt += 1
                arch_name = '%s_%s.zip' % (name_pattern, cnt)
            else:
                arch_name = '%s.zip' % name_pattern
                # Bug fix: previously the pattern was never recorded, leaving
                # names_in_use empty forever, so duplicate entry names were
                # written into the ZIP. Remember every used pattern.
                names_in_use.add(name_pattern)
            for data in self.stream.compress_file(arch_path, arch_name):
                yield data
        yield self.stream.close_stream()

    def __get_archives(self):
        # Map verification report id -> path of its verifier input archive.
        # The .exclude(verifier_input='') already drops empty values, so no
        # per-row truthiness re-check is needed.
        archives = {}
        for c in ReportComponent.objects.filter(root=self.root, verification=True)\
                .exclude(verifier_input='').only('id', 'verifier_input'):
            archives[c.id] = c.verifier_input.path
        return archives

    def __get_attrs(self):
        # Resolve ids of the two attribute names used for entry naming.
        names = {}
        for a_name in AttrName.objects.filter(name__in=[self.obj_attr, self.requirement_attr]):
            names[a_name.id] = a_name.name
        attrs = {}
        # Select attributes for all safes, unsafes and unknowns: exclude()
        # ANDs its conditions, so only reports that are neither safe, unsafe
        # nor unknown (pure component reports) are dropped.
        for r_id, n_id, a_val in ReportAttr.objects.filter(report__root=self.root, attr__name_id__in=names)\
                .exclude(report__reportunsafe=None, report__reportsafe=None, report__reportunknown=None)\
                .values_list('report_id', 'attr__name_id', 'attr__value'):
            if r_id not in attrs:
                attrs[r_id] = {}
            attrs[r_id][names[n_id]] = a_val
        return attrs

    def __add_archive(self, r_type, r_id, p_id):
        # Queue the parent's verifier archive, but only when both naming
        # attributes are known for the leaf report; otherwise skip silently.
        if p_id in self._archives and r_id in self._attrs \
                and self.obj_attr in self._attrs[r_id] \
                and self.requirement_attr in self._attrs[r_id]:
            # Sanitize attribute values so they are safe as ZIP entry names.
            ver_obj = self._attrs[r_id][self.obj_attr].replace('~', 'HOME').replace('/', '---')
            ver_requirement = self._attrs[r_id][self.requirement_attr].replace(':', '-')
            dirname = 'Unknowns' if r_type == 'f' else 'Unsafes' if r_type == 'u' else 'Safes'
            self._archives_to_upload.append(
                (self._archives[p_id], '{0}/{1}__{2}__{3}'.format(dirname, r_type, ver_requirement, ver_obj))
            )

    def __get_archives_to_upload(self):
        for f_t in self.filters:
            if isinstance(f_t, list) and f_t:
                # A non-empty list selects unknowns by '<component>_<problem>'
                # pairs; '0_0' means unknowns without any mark report.
                for problem in f_t:
                    comp_id, problem_id = problem.split('_')[0:2]
                    if comp_id == problem_id == '0':
                        queryset = ReportUnknown.objects.annotate(mr_len=Count('markreport_set'))\
                            .filter(root=self.root, mr_len=0).exclude(parent__parent=None)\
                            .values_list('id', 'parent_id')
                    else:
                        queryset = ReportUnknown.objects \
                            .filter(root=self.root, markreport_set__problem_id=problem_id, component_id=comp_id)\
                            .exclude(parent__parent=None).values_list('id', 'parent_id')
                    for args in queryset:
                        self.__add_archive('f', *args)
            else:
                # 'u' -> unsafes, 's' -> safes, anything else (including an
                # empty list) -> unknowns.
                model = ReportUnsafe if f_t == 'u' else ReportSafe if f_t == 's' else ReportUnknown
                for args in model.objects.filter(root=self.root).exclude(parent__parent=None)\
                        .values_list('id', 'parent_id'):
                    self.__add_archive('f' if isinstance(f_t, list) else f_t, *args)