def rebuild_export(config, schema, output_dir, last_access_cutoff=None, filter=None):
    """Regenerate a single saved export and persist it.

    If ``output_dir`` is the sentinel string "couch", the payload is written
    onto a SavedBasicExport document; otherwise it is written to a file named
    ``config.filename`` inside ``output_dir``.

    When targeting couch, an existing export whose ``last_accessed`` is older
    than ``last_access_cutoff`` is skipped entirely (nobody is reading it).

    Raises ExportRebuildError when the stored schema no longer matches, after
    queueing a delayed schema rebuild so the next attempt can succeed.
    """
    saved = None
    if output_dir == "couch":
        saved = get_saved_export_and_delete_copies(config.index)
        is_stale = (last_access_cutoff and saved and saved.last_accessed
                    and saved.last_accessed < last_access_cutoff)
        if is_stale:
            # ignore exports that haven't been accessed since last_access_cutoff
            return

    try:
        files = schema.get_export_files(format=config.format, filter=filter)
    except SchemaMismatchException:
        # fire off a delayed force update to prevent this from happening again
        rebuild_schemas.delay(config.index)
        raise ExportRebuildError(u'Schema mismatch for {}. Rebuilding tables...'.format(config.filename))

    with files:
        payload = files.file.payload
        if output_dir != "couch":
            with open(os.path.join(output_dir, config.filename), "wb") as f:
                f.write(payload)
            return
        if not saved:
            saved = SavedBasicExport(configuration=config)
        else:
            saved.configuration = config
        if saved.last_accessed is None:
            saved.last_accessed = datetime.utcnow()
        saved.last_updated = datetime.utcnow()
        saved.save()
        saved.set_payload(payload)
def export_for_group(export_id_or_group, output_dir):
    """Rebuild every export in a group configuration.

    ``export_id_or_group`` may be a GroupExportConfiguration instance or its
    document id. Each export is written either onto a SavedBasicExport couch
    document (``output_dir == "couch"``) or to a file under ``output_dir``.

    Exports whose schema no longer matches are skipped after queueing a
    delayed schema rebuild.
    """
    if isinstance(export_id_or_group, basestring):
        try:
            config = GroupExportConfiguration.get(export_id_or_group)
        except ResourceNotFound:
            raise Exception("Couldn't find an export with id %s" % export_id_or_group)
    else:
        config = export_id_or_group

    for config, schema in config.all_exports:
        try:
            files = schema.get_export_files(format=config.format)
        except SchemaMismatchException:
            # fire off a delayed force update to prevent this from happening again
            rebuild_schemas.delay(config.index)
            continue
        # use the files object as a context manager so its backing temp files
        # are cleaned up (matches how rebuild_export uses it)
        with files:
            payload = files.file.payload
            if output_dir == "couch":
                saved = SavedBasicExport.view("couchexport/saved_exports",
                                              key=json.dumps(config.index),
                                              include_docs=True,
                                              reduce=False).one()
                if not saved:
                    saved = SavedBasicExport(configuration=config)
                else:
                    saved.configuration = config
                saved.last_updated = datetime.utcnow()
                saved.save()
                saved.set_payload(payload)
            else:
                with open(os.path.join(output_dir, config.filename), "wb") as f:
                    f.write(payload)
def export_for_group(export_id_or_group, output_dir):
    """Rebuild every export in a group configuration.

    ``export_id_or_group`` may be a GroupExportConfiguration instance or its
    document id. Each export payload is written either onto a SavedBasicExport
    couch document (``output_dir == "couch"``) or to a file under
    ``output_dir``.

    Exports whose schema no longer matches are skipped after queueing a
    delayed schema rebuild.
    """
    if isinstance(export_id_or_group, basestring):
        try:
            config = GroupExportConfiguration.get(export_id_or_group)
        except ResourceNotFound:
            raise Exception("Couldn't find an export with id %s" % export_id_or_group)
    else:
        config = export_id_or_group

    for config, schema in config.all_exports:
        try:
            tmp, _ = schema.get_export_files(format=config.format)
        except SchemaMismatchException:
            # fire off a delayed force update to prevent this from happening again
            rebuild_schemas.delay(config.index)
            continue
        payload = Temp(tmp).payload
        if output_dir == "couch":
            saved = SavedBasicExport.view("couchexport/saved_exports",
                                          key=json.dumps(config.index),
                                          include_docs=True,
                                          reduce=False).one()
            if not saved:
                saved = SavedBasicExport(configuration=config)
            else:
                saved.configuration = config
            saved.last_updated = datetime.utcnow()
            saved.save()
            saved.set_payload(payload)
        else:
            with open(os.path.join(output_dir, config.filename), "wb") as f:
                f.write(payload)
def handle(self, *args, **options):
    """Management-command entry point: write each export in a group either
    into couch (``output_dir == "couch"``) or to files under ``output_dir``.

    args[0] is the GroupExportConfiguration id, args[1] the output target.
    Raises CommandError when arguments are missing or the config is unknown.
    """
    if len(args) < 2:
        raise CommandError('Please specify %s.' % self.label)
    export_id = args[0]
    output_dir = args[1]
    try:
        config = GroupExportConfiguration.get(export_id)
    except ResourceNotFound:
        raise CommandError("Couldn't find an export with id %s" % export_id)
    for export_config in config.full_exports:
        print("exporting %s to %s" % (export_config.name, output_dir))
        # special case couch storage
        if output_dir == "couch":
            fd, path = tempfile.mkstemp()
            try:
                with os.fdopen(fd, 'wb') as f:
                    export(export_config.index, f, format=export_config.format)
                # got the file, now rewrite it to couch
                saved = SavedBasicExport.view("couchexport/saved_exports",
                                              key=json.dumps(export_config.index),
                                              include_docs=True,
                                              reduce=False).one()
                if not saved:
                    saved = SavedBasicExport(configuration=export_config)
                    saved.save()
                with open(path, "rb") as f:
                    saved.put_attachment(f.read(), export_config.filename)
                saved.last_updated = datetime.utcnow()
                saved.save()
            finally:
                # mkstemp leaves cleanup to the caller: make sure the temp
                # file is removed even if the export or couch save fails
                os.remove(path)
        else:
            with open(os.path.join(output_dir, export_config.filename), "wb") as f:
                export(export_config.index, f, format=export_config.format)
def _save_export_payload(files, saved_export, config, is_safe=False):
    """Persist an export payload onto a SavedBasicExport document.

    Creates a new SavedBasicExport when ``saved_export`` is falsy, otherwise
    refreshes its configuration in place. A ResourceConflict on save means a
    concurrent task already wrote this document; the first writer wins and
    this call silently gives up.
    """
    payload = files.file.payload
    if saved_export:
        saved_export.configuration = config
    else:
        saved_export = SavedBasicExport(configuration=config)
    saved_export.is_safe = is_safe
    # stamp first access on newly-seen exports only
    if saved_export.last_accessed is None:
        saved_export.last_accessed = datetime.utcnow()
    saved_export.last_updated = datetime.utcnow()
    try:
        saved_export.save()
    except ResourceConflict:
        # task was executed concurrently, so let first to finish win and abort the rest
        pass
    else:
        saved_export.set_payload(payload)