def export_task(pk):
    """Run the export pipeline for the ExportJob with primary key *pk*.

    Purges the job owner's finished or stale (over one hour untouched)
    jobs, then resolves the job's handler and writer classes from their
    dotted paths and streams the export into a temporary file. When the
    handler signals completion, the file contents are attached to the
    job and a success update is recorded.
    """
    from bambu.dataportability.models import ExportJob

    job = ExportJob.objects.get(pk=pk)

    # Clean up the user's other jobs: drop completed ones and any that
    # have not been updated for over an hour.
    for j in ExportJob.objects.filter(user=job.user):
        if j.progress == 100:
            j.delete()
        elif (now() - j.updated).total_seconds() > 60 * 60:
            # BUG FIX: was `job.updated` (the current job's timestamp,
            # not the iterated job's) and `.seconds` (which ignores whole
            # days of a timedelta); now each job's own age is checked.
            j.delete()

    def finished(stream):
        # Callback invoked by the handler once the export completes:
        # persist the written stream on the job and record success.
        stream.seek(0)
        job.data = File(stream)
        job.save()
        job.updates.success(u'Finished export')

    # Create a named temp file carrying the writer's extension; we
    # reopen it by name below, so close the low-level handle.
    handle, filename = mkstemp(
        helpers.get_extension_for_writer(job.writer)
    )

    os.close(handle)
    # BUG FIX: 'r+w' is not a valid mode string (rejected on Python 3);
    # 'r+' opens the file for update (read + write).
    stream = open(filename, 'r+')

    # Resolve the handler class from its dotted path, bound to the job.
    module, dot, klass = job.handler.rpartition('.')
    module = import_module(module)
    handler = getattr(module, klass)(job)

    # Resolve the writer class likewise and wire it to the stream.
    module, dot, klass = job.writer.rpartition('.')
    module = import_module(module)
    writer = getattr(module, klass)(
        stream, handler.export_wrapper, handler.export_item
    )

    handler.start_export(writer, finished)
def start(self):
    """Begin exporting this job's data.

    Deletes the owner's finished or stale (over one hour untouched)
    jobs, then resolves this job's handler and writer classes from
    their dotted paths and streams the export into a temporary file.
    The ``finished`` callback attaches the resulting file to the job
    and records a success update.
    """
    from django.utils.importlib import import_module

    # Clean up the user's other jobs: drop completed ones and any that
    # have not been updated for over an hour.
    for job in ExportJob.objects.filter(user=self.user):
        if job.progress == 100:
            job.delete()
        elif (datetime.utcnow().replace(tzinfo=utc) - job.updated).total_seconds() > 60 * 60:
            # BUG FIX: was `.seconds`, which ignores whole days of a
            # timedelta, so multi-day-old jobs could escape cleanup.
            job.delete()

    def finished(stream):
        # Callback invoked by the handler once the export completes:
        # persist the written stream on this job and record success.
        stream.seek(0)
        self.data = File(stream)
        self.save()
        self.updates.success('Finished export')

    # Create a named temp file carrying the writer's extension; we
    # reopen it by name below, so close the low-level handle.
    handle, filename = mkstemp(
        helpers.get_extension_for_writer(self.writer)
    )

    os.close(handle)
    # BUG FIX: 'r+w' is not a valid mode string (rejected on Python 3);
    # 'r+' opens the file for update (read + write).
    stream = open(filename, 'r+')

    # Resolve the handler class from its dotted path, bound to this job.
    module, dot, klass = self.handler.rpartition('.')
    module = import_module(module)
    handler = getattr(module, klass)(self)

    # Resolve the writer class likewise and wire it to the stream.
    module, dot, klass = self.writer.rpartition('.')
    module = import_module(module)
    writer = getattr(module, klass)(
        stream, handler.export_wrapper, handler.export_item
    )

    handler.start_export(writer, finished)
def export_task(pk):
    """Run the export pipeline for the ExportJob with primary key *pk*.

    Purges the job owner's finished or stale (over one hour untouched)
    jobs, then resolves the job's handler and writer classes from their
    dotted paths and streams the export into a temporary file. When the
    handler signals completion, the file contents are attached to the
    job and a success update is recorded.
    """
    from bambu.dataportability.models import ExportJob

    job = ExportJob.objects.get(pk=pk)

    # Clean up the user's other jobs: drop completed ones and any that
    # have not been updated for over an hour.
    for j in ExportJob.objects.filter(user=job.user):
        if j.progress == 100:
            j.delete()
        elif (now() - j.updated).total_seconds() > 60 * 60:
            # BUG FIX: was `job.updated` (the current job's timestamp,
            # not the iterated job's) and `.seconds` (which ignores whole
            # days of a timedelta); now each job's own age is checked.
            j.delete()

    def finished(stream):
        # Callback invoked by the handler once the export completes:
        # persist the written stream on the job and record success.
        stream.seek(0)
        job.data = File(stream)
        job.save()
        job.updates.success(u'Finished export')

    # Create a named temp file carrying the writer's extension; we
    # reopen it by name below, so close the low-level handle.
    handle, filename = mkstemp(helpers.get_extension_for_writer(job.writer))
    os.close(handle)

    # BUG FIX: 'r+w' is not a valid mode string (rejected on Python 3);
    # 'r+' opens the file for update (read + write).
    stream = open(filename, 'r+')

    # Resolve the handler class from its dotted path, bound to the job.
    module, dot, klass = job.handler.rpartition('.')
    module = import_module(module)
    handler = getattr(module, klass)(job)

    # Resolve the writer class likewise and wire it to the stream.
    module, dot, klass = job.writer.rpartition('.')
    module = import_module(module)
    writer = getattr(module, klass)(stream, handler.export_wrapper, handler.export_item)

    handler.start_export(writer, finished)
def save(self, *args, **kwargs):
    """Persist the job, assigning a default filename when none is set.

    The generated name combines the target model, its object id, the
    current UTC date and the writer's file extension.
    """
    if not self.name:
        datestamp = datetime.utcnow().replace(tzinfo=utc).strftime('%Y-%m-%d')
        extension = helpers.get_extension_for_writer(self.writer)
        self.name = '%s_%d_%s%s' % (
            self.content_type.model,
            self.object_id,
            datestamp,
            extension
        )

    super(ExportJob, self).save(*args, **kwargs)
def save(self, *args, **kwargs):
    """Persist the job, defaulting the filename and pushing progress.

    When no name is set, one is generated from the target model, its
    object id, today's date and the writer's extension. After saving,
    a realtime 'progress' event is pushed whenever the stored progress
    value has increased.
    """
    if not self.name:
        self.name = '%s_%d_%s%s' % (
            self.content_type.model,
            self.object_id,
            now().strftime('%Y-%m-%d'),
            helpers.get_extension_for_writer(self.writer)
        )

    dopush = False
    if self.pk:
        # Compare against the stored row to detect forward progress.
        # BUG FIX: fetch defensively with filter().first() — saving an
        # object that carries an explicit but not-yet-persisted pk used
        # to raise ExportJob.DoesNotExist via objects.get().
        old = ExportJob.objects.filter(pk=self.pk).first()
        dopush = old is not None and self.progress > old.progress

    super(ExportJob, self).save(*args, **kwargs)

    if dopush:
        # Broadcast the new progress value on the job's channel.
        push(
            channel='bambu.dataportability.%d' % self.pk,
            event='progress',
            progress=self.progress
        )