def handle(self, *args, **options):
    """Run all exports in a GroupExportConfiguration.

    Expects two positional args: the export config id and an output
    directory.  If the output directory is the literal string "couch",
    each export is written to a temp file and then attached to a
    SavedBasicExport document instead of the filesystem.

    Raises CommandError when args are missing or the config id does
    not resolve to a GroupExportConfiguration.
    """
    if len(args) < 2:
        raise CommandError('Please specify %s.' % self.label)
    export_id = args[0]
    output_dir = args[1]

    try:
        config = GroupExportConfiguration.get(export_id)
    except ResourceNotFound:
        raise CommandError("Couldn't find an export with id %s" % export_id)

    for export_config in config.full_exports:
        # parenthesized print works identically on python 2 and 3
        print("exporting %s to %s" % (export_config.name, output_dir))
        # special case couch storage
        if output_dir == "couch":
            fd, path = tempfile.mkstemp()
            try:
                with os.fdopen(fd, 'wb') as f:
                    export(export_config.index, f, format=export_config.format)
                # got the file, now rewrite it to couch
                saved = SavedBasicExport.view(
                    "couchexport/saved_exports",
                    key=json.dumps(export_config.index),
                    include_docs=True,
                    reduce=False,
                ).one()
                if not saved:
                    saved = SavedBasicExport(configuration=export_config)
                    saved.save()
                with open(path, "rb") as f:
                    saved.put_attachment(f.read(), export_config.filename)
                saved.last_updated = datetime.utcnow()
                saved.save()
            finally:
                # previously leaked the temp file if export/save raised
                os.remove(path)
        else:
            with open(os.path.join(output_dir, export_config.filename), "wb") as f:
                export(export_config.index, f, format=export_config.format)
def get_export_files(export_tag, format=None, previous_export_id=None, filter=None,
                     use_cache=True, max_column_size=2000, separator='|'):
    """Run an export and return ``(file_like, checkpoint)``.

    Results for filterless queries are cached for one hour, keyed on
    (tag, previous export id, format, max column size).  Returns
    ``(None, None)`` when the export produced no checkpoint (the hacky
    empty case).

    NOTE: ``format`` and ``filter`` shadow builtins, but they are part
    of the public keyword interface so the names are kept.
    """
    # the APIs of how these methods are broken down suck, but at least
    # it's DRY
    from couchexport.export import export

    CACHE_TIME = 1 * 60 * 60  # cache for 1 hour, in seconds

    def _build_cache_key(tag, prev_export_id, format, max_column_size):
        # hash the human-readable key so it is safe for any cache backend;
        # encode explicitly so non-ascii tags don't blow up md5
        readable = "couchexport_:%s:%s:%s:%s" % (
            tag, prev_export_id, format, max_column_size)
        return hashlib.md5(readable.encode('utf-8')).hexdigest()

    # check cache, only supported for filterless queries, currently
    cache_key = _build_cache_key(export_tag, previous_export_id, format,
                                 max_column_size)
    if use_cache and filter is None:
        cached_data = cache.get(cache_key)
        if cached_data:
            (tmp, checkpoint) = cached_data
            return (tmp, checkpoint)

    tmp = StringIO()
    checkpoint = export(export_tag, tmp, format=format,
                        previous_export_id=previous_export_id,
                        filter=filter, max_column_size=max_column_size,
                        separator=separator)
    if checkpoint:
        if use_cache:
            cache.set(cache_key, (tmp, checkpoint), CACHE_TIME)
        return (tmp, checkpoint)

    return (None, None)  # hacky empty case
def schema_export(namespace, download_id):
    """Export ``namespace`` to an XLS temp file and record the outcome.

    Writes a JSON blob under ``download_id`` in the cache (24 hour TTL):
    on success it carries the mimetype, content-disposition header and
    the temp file's location; on failure it carries an error message and
    a ``None`` location.
    """
    cache_container = {}
    tmp = tempfile.NamedTemporaryFile(suffix='.xls', delete=False)
    try:
        success = export(namespace, tmp, format=Format.XLS)
    finally:
        # previously the handle was only closed on the success path
        tmp.close()

    if success:
        cache_container['mimetype'] = 'application/vnd.ms-excel'
        cache_container['Content-Disposition'] = \
            'attachment; filename=%s.xls' % namespace
        cache_container['location'] = tmp.name
    else:
        # nothing useful was written; clean up the orphaned temp file
        # (delete=False means it would otherwise linger on disk)
        os.remove(tmp.name)
        cache_container = {
            'mimetype': 'text/plain',
            'location': None,
            'message': "No data due to an error generating the file",
        }

    cache.set(download_id, simplejson.dumps(cache_container), 86400)