def upload(self):
    """Perform the full job-upload pipeline for ``self._upload_obj``.

    Stages (each reported via ``__change_upload_status`` with the matching
    JOB_UPLOAD_STATUS code): extract the uploaded archive, upload the job
    files found under JOBFILE_DIR, validate and save the job itself from
    'job.json', then upload the job reports. If the uploaded reports carry
    decisions, caches for them are recalculated as a final stage.

    Raises a serializer ValidationError (``raise_exception=True``) if
    'job.json' does not validate.
    """
    # Extract job archive
    self.__change_upload_status(JOB_UPLOAD_STATUS[1][0])
    with self._upload_obj.archive.file as fp:
        # extract_archive presumably returns a temp-dir-like object with
        # a ``name`` attribute — TODO confirm against its definition
        job_dir = extract_archive(fp)

    # Upload job files
    self.__change_upload_status(JOB_UPLOAD_STATUS[2][0])
    self.__upload_job_files(os.path.join(job_dir.name, JOBFILE_DIR))

    # Save job
    self.__change_upload_status(JOB_UPLOAD_STATUS[3][0])
    serializer_data = self.__parse_job_json(os.path.join(job_dir.name, 'job.json'))
    serializer = DownloadJobSerializer(data=serializer_data)
    serializer.is_valid(raise_exception=True)
    self.job = serializer.save(
        author=self._upload_obj.author,
        preset_id=self.__get_preset_id(serializer_data.get('preset_info'))
    )

    # Upload job reports
    self.__change_upload_status(JOB_UPLOAD_STATUS[4][0])
    res = UploadReports(self._upload_obj.author, self.job, job_dir.name)
    if res.decisions:
        # Recalculate cache if job has decisions
        self.__change_upload_status(JOB_UPLOAD_STATUS[5][0])
        Recalculation('all', res.decisions)
def post(self, request):
    """Upload one or more marks from the uploaded file(s).

    Each uploaded file is either a single mark archive or a zip that
    contains only ``*.zip`` members (a batch of mark archives); in the
    latter case it is extracted and every inner archive is uploaded
    separately.

    Returns a Response with the mark URL when exactly one mark was
    created, otherwise a summary message with the number of created marks.
    Raises PermissionDenied when the user may not upload marks.
    """
    if not MarkAccess(request.user).can_upload:
        raise exceptions.PermissionDenied(_("You don't have an access to create new marks"))
    marks_links = []
    marks_uploader = MarksUploader(request.user)
    for f in self.request.FILES.getlist('file'):
        with zipfile.ZipFile(f, 'r') as zfp:
            if all(file_name.endswith('.zip') for file_name in zfp.namelist()):
                # A zip of zips: a batch of mark archives
                marks_dir = extract_archive(f)
                for arch_name in os.listdir(marks_dir.name):
                    with open(os.path.join(marks_dir.name, arch_name), mode='rb') as fp:
                        # upload_mark() returns a pair; [1] is the mark's URL
                        marks_links.append(marks_uploader.upload_mark(File(fp, name=arch_name))[1])
                # NOTE: removed a dead ``pass`` statement that followed this loop
            else:
                # A single mark archive
                marks_links.append(marks_uploader.upload_mark(f)[1])
    if len(marks_links) == 1:
        return Response({'url': marks_links[0]})
    return Response({
        'message': _('Number of created marks: %(number)s') % {'number': len(marks_links)}
    })
def get_context_data(self, **kwargs):
    """Extract the uploaded marks archive and upload every mark in it.

    Only managers/expert roles (USER_ROLES[2] and USER_ROLES[4]) may do
    this; anyone else gets a BridgeException. Returns the ``numbers``
    summary produced by UploadAllMarks. The optional POST flag 'delete'
    (0/1) controls whether existing marks are deleted first.
    """
    allowed_roles = (USER_ROLES[2][0], USER_ROLES[4][0])
    if self.request.user.extended.role not in allowed_roles:
        raise BridgeException("You don't have an access to upload marks")
    marks_dir = extract_archive(self.request.FILES['file'])
    delete_old = bool(int(self.request.POST.get('delete', 0)))
    uploader = UploadAllMarks(self.request.user, marks_dir.name, delete_old)
    return uploader.numbers
def get_context_data(self, **kwargs):
    """Upload a whole jobs tree from the uploaded archive.

    Restricted to the manager role (USER_ROLES[2]); refused while any job
    is being solved, since uploading concurrently could corrupt results.
    Extracts the archive and delegates to UploadTree under the parent job
    given by POST['parent_id']. Returns an empty context.
    """
    if self.request.user.extended.role != USER_ROLES[2][0]:
        raise BridgeException(_("You don't have an access to upload jobs tree"))
    if Job.objects.filter(status__in=[JOB_STATUS[1][0], JOB_STATUS[2][0]]).count() > 0:
        # Fixed grammar of the user-facing message ("corrupt it results",
        # "until it will be finished"); translation catalogs need updating.
        raise BridgeException(_("There are jobs in progress right now, uploading may corrupt their results. "
                                "Please wait until they are finished."))
    jobs_dir = extract_archive(self.request.FILES['file'])
    UploadTree(self.request.POST['parent_id'], self.request.user, jobs_dir.name)
    return {}
def get_context_data(self, **kwargs):
    """Upload reports (without a decision) for the current job.

    The user must be allowed to decide the job; extraction failures are
    logged and surfaced as a BridgeException. Returns an empty context.
    """
    access = jobs.utils.JobAccess(self.request.user, self.object)
    if not access.can_decide():
        raise BridgeException(_("You don't have an access to upload reports for this job"))
    try:
        reports_dir = extract_archive(self.request.FILES['archive'])
    except Exception as err:
        logger.exception(err)
        raise BridgeException(_('Extraction of the archive has failed'))
    UploadReportsWithoutDecision(self.object, self.request.user, reports_dir.name)
    return {}
def upload_all(self):
    """Save the uploaded archive(s).

    If every file listed in ``self._arch_files`` has a '.zip' extension,
    ``self._archive`` is treated as a batch: it is extracted and each
    inner archive is saved as a separate job. Otherwise the archive is a
    single job and is saved as-is.
    """
    is_batch = all(
        os.path.splitext(file_path)[-1] == '.zip'
        for file_path in self._arch_files
    )
    if not is_batch:
        # A single job
        self.__save_archive(self._archive)
        return
    # A list of jobs
    jobs_dir = extract_archive(self._archive)
    for arch_name in os.listdir(jobs_dir.name):
        with open(os.path.join(jobs_dir.name, arch_name), mode='rb') as fp:
            self.__save_archive(File(fp, name=arch_name))
def post(self, request):
    """Upload one or more marks, tolerating per-mark failures in batches.

    Each uploaded file is either a single mark archive or a zip containing
    only ``*.zip`` members (a batch). Failures inside a batch are logged
    and counted instead of aborting the whole upload.

    Returns the mark URL when exactly one mark was created and nothing
    failed; otherwise a summary message including the failure count when
    there were failures. Raises PermissionDenied when the user may not
    upload marks.
    """
    if not MarkAccess(request.user).can_upload:
        raise exceptions.PermissionDenied(_("You don't have an access to create new marks"))
    marks_links = []
    failed_mark_uploads = 0
    marks_uploader = MarksUploader(request.user)
    for f in self.request.FILES.getlist('file'):
        with zipfile.ZipFile(f, 'r') as zfp:
            if all(file_name.endswith('.zip') for file_name in zfp.namelist()):
                # A zip of zips: upload each inner mark archive separately
                marks_dir = extract_archive(f)
                for arch_name in os.listdir(marks_dir.name):
                    with open(os.path.join(marks_dir.name, arch_name), mode='rb') as fp:
                        try:
                            marks_links.append(
                                marks_uploader.upload_mark(File(fp, name=arch_name))[1])
                        except Exception as e:
                            logger.exception(e)
                            logger.error('Uploading of mark "{}" has failed.'.format(arch_name))
                            failed_mark_uploads += 1
            else:
                marks_links.append(marks_uploader.upload_mark(f)[1])
    # BUGFIX: previously the single-link shortcut fired even when other
    # uploads failed, silently hiding the failures from the user.
    if len(marks_links) == 1 and not failed_mark_uploads:
        return Response({'url': marks_links[0]})
    if failed_mark_uploads:
        return Response({
            'message': _('Number of created marks: %(number)s.'
                         ' Number of marks which uploading failed: %(failed_number)s.'
                         ' See logs for details.') % {
                'number': len(marks_links),
                'failed_number': failed_mark_uploads
            }
        })
    return Response({
        'message': _('Number of created marks: %(number)s') % {'number': len(marks_links)}
    })
def __get_files(self, archive):
    """Extract *archive* and register every contained file as an AttrFile.

    Walks the extracted tree, saves each file into a new AttrFile bound to
    ``self._root_id`` and records its id in ``self._files`` keyed by the
    forward-slash relative path. Raises ValueError when extraction fails.
    """
    archive.seek(0)
    try:
        files_dir = extract_archive(archive)
    except Exception as e:
        logger.exception("Archive extraction failed: %s" % e, stack_info=True)
        raise ValueError('Archive "%s" with attributes data is corrupted' % archive.name)
    root = files_dir.name
    for cur_dir, _subdirs, names in os.walk(root):
        for name in names:
            src_path = os.path.join(cur_dir, name)
            # Keys always use forward slashes, even on Windows
            key = os.path.relpath(src_path, root).replace('\\', '/')
            newfile = AttrFile(root_id=self._root_id)
            with open(src_path, mode='rb') as fp:
                newfile.file.save(os.path.basename(key), File(fp), True)
            self._files[key] = newfile.id
def __upload_attrs_files(self, archive):
    """Extract *archive* and save every contained file as an AttrFile.

    Returns a mapping from the file's forward-slash relative path to the
    new AttrFile primary key; an empty dict when *archive* is falsy.
    Raises a DRF ValidationError when extraction fails.
    """
    if not archive:
        return {}
    try:
        files_dir = extract_archive(archive)
    except Exception as e:
        logger.exception("Archive extraction failed: %s" % e)
        raise exceptions.ValidationError(
            detail={'attr_data': 'Archive "{}" is corrupted'.format(archive.name)})
    root = files_dir.name
    db_files = {}
    for cur_dir, _subdirs, names in os.walk(root):
        for name in names:
            src_path = os.path.join(cur_dir, name)
            # Keys always use forward slashes, even on Windows
            key = os.path.relpath(src_path, root).replace('\\', '/')
            newfile = AttrFile(decision=self.decision)
            with open(src_path, mode='rb') as fp:
                newfile.file.save(os.path.basename(key), File(fp), save=True)
            db_files[key] = newfile.pk
    return db_files
def get_context_data(self, **kwargs):
    """Create a job from each uploaded archive under the given parent.

    The user must be allowed to create jobs. Per-archive extraction or
    creation failures are logged and converted into BridgeException
    messages naming the offending archive. Returns an empty context.
    """
    if not jobs.utils.JobAccess(self.request.user).can_create():
        raise BridgeException(_("You don't have an access to upload jobs"))
    for archive in self.request.FILES.getlist('file'):
        try:
            job_dir = extract_archive(archive)
        except Exception as err:
            logger.exception(err)
            raise BridgeException(
                _('Extraction of the archive "%(arcname)s" has failed') % {'arcname': archive.name})
        try:
            UploadJob(self.kwargs['parent_id'], self.request.user, job_dir.name)
        except BridgeException as err:
            # Known errors carry a user-presentable message
            raise BridgeException(
                _('Creating the job from archive "%(arcname)s" failed: %(message)s') % {
                    'arcname': archive.name, 'message': str(err)
                })
        except Exception as err:
            # Unknown errors: log and show a generic corruption message
            logger.exception(err)
            raise BridgeException(
                _('Creating the job from archive "%(arcname)s" failed: %(message)s') % {
                    'arcname': archive.name, 'message': _('The job archive is corrupted')
                })
    return {}
def upload(self):
    """Perform the full job-upload pipeline with stage logging.

    Stages (each announced via ``self._logger.start`` with the matching
    JOB_UPLOAD_STATUS code): extract the uploaded archive, upload the job
    files, validate and save the job from 'job.json', then upload the
    decisions. When the job has no decisions the upload finishes early;
    otherwise original sources, reports and decision statuses are uploaded
    and caches for all decisions are recalculated.

    Raises a serializer ValidationError (``raise_exception=True``) if
    'job.json' does not validate.
    """
    # Extract job archive
    self._logger.log('=' * 30)
    self._logger.start(JOB_UPLOAD_STATUS[1][0])
    with self._upload_obj.archive.file as fp:
        job_dir = extract_archive(fp)
    # Remember the extraction directory for the helper methods below
    self._jobdir = job_dir.name

    # Upload job files
    self._logger.start(JOB_UPLOAD_STATUS[2][0])
    self.__upload_job_files(os.path.join(job_dir.name, JOBFILE_DIR))

    # Save job
    self._logger.start(JOB_UPLOAD_STATUS[3][0])
    serializer_data = self.__parse_job_json(os.path.join(job_dir.name, 'job.json'))
    serializer = DownloadJobSerializer(data=serializer_data)
    serializer.is_valid(raise_exception=True)
    self.job = serializer.save(
        author=self._upload_obj.author,
        preset_id=self.__get_preset_id(serializer_data.get('preset_info'))
    )

    # Upload job decisions objects with cache
    self._logger.start(JOB_UPLOAD_STATUS[4][0])
    self.__upload_decisions()
    if not self._decisions:
        # Nothing to recalculate for a job without decisions
        self._logger.finish_all()
        return
    self._logger.start(JOB_UPLOAD_STATUS[5][0])
    self.__upload_original_sources()
    self._logger.end()
    # NOTE(review): these helpers presumably log their own stages
    # (statuses 6..11) internally — confirm against their definitions
    self.__upload_reports()
    self.__change_decision_statuses()

    # Recalculate cache if job has decisions
    self._logger.start(JOB_UPLOAD_STATUS[12][0])
    Recalculation('all', list(self._decisions.values()))
    self._logger.finish_all()
def post(self, request):
    """Upload all marks from the posted archive.

    Extracts the archive from FILES['file'] and delegates to
    UploadAllMarks; the optional POST flag 'delete' (0/1) controls whether
    existing marks are deleted first. Returns the upload summary numbers.
    """
    extracted = extract_archive(self.request.FILES['file'])
    delete_old = bool(int(request.POST.get('delete', 0)))
    upload_res = UploadAllMarks(request.user, extracted.name, delete_old)
    return Response(upload_res.numbers)