def export_data_shared(export_tag, format=None, filename=None,
                       previous_export_id=None, filter=None, use_cache=True,
                       max_column_size=2000, separator='|'):
    """
    Shared method for export.

    If there is data, return an HTTPResponse with the appropriate data.
    If there is no data, return None.

    :param export_tag: index identifying the export schema; also used as the
        download filename when ``filename`` is not given.
    :param format: export file format passed through to the exporter.
        (NOTE: shadows the ``format`` builtin, but it is a caller-visible
        keyword so it cannot be renamed.)
    :param filename: download filename; defaults to ``export_tag``.
    :param previous_export_id: doc id of a prior export to resume from;
        returns 404 if no such doc exists.
    :param filter: optional row filter passed to the exporter (shadows the
        ``filter`` builtin; caller-visible keyword).
    :param use_cache: whether the exporter may serve cached results.
    :param max_column_size: column-count cap passed to the exporter.
    :param separator: column separator passed to the exporter.
    """
    if previous_export_id and not SavedExportSchema.get_db().doc_exist(
            previous_export_id):
        # Translate first, THEN interpolate. Formatting before the _() call
        # produces a msgid containing the concrete id, which can never match
        # the translation catalog entry.
        return HttpResponseNotFound(
            _('No previous export with id "{id}" found').format(
                id=previous_export_id))
    if not filename:
        filename = export_tag
    files = DefaultExportSchema(index=export_tag).get_export_files(
        format=format,
        previous_export_id=previous_export_id,
        filter=filter,
        use_cache=use_cache,
        max_column_size=max_column_size,
        separator=separator,
    )
    if files and files.checkpoint:
        return export_response(files.file, format, filename, files.checkpoint)
    return None
def export_data_shared(export_tag, format=None, filename=None,
                       previous_export_id=None, filter=None, use_cache=True,
                       max_column_size=2000, separator='|'):
    """
    Shared method for export.

    If there is data, return an HTTPResponse with the appropriate data.
    If there is no data, return None.

    :param export_tag: index identifying the export schema; also the default
        download filename.
    :param format: export file format forwarded to the exporter (shadows the
        ``format`` builtin; kept because it is a caller-visible keyword).
    :param filename: download filename; falls back to ``export_tag``.
    :param previous_export_id: doc id of a prior export to resume from;
        a missing doc yields an ``HttpResponseNotFound``.
    :param filter: optional row filter forwarded to the exporter (shadows
        the ``filter`` builtin; caller-visible keyword).
    :param use_cache: whether cached export results may be served.
    :param max_column_size: column-count cap forwarded to the exporter.
    :param separator: column separator forwarded to the exporter.
    """
    if previous_export_id and not SavedExportSchema.get_db().doc_exist(previous_export_id):
        # Interpolation must happen AFTER translation: calling .format()
        # inside _() builds a msgid with the concrete id baked in, which
        # never matches the translation catalog.
        return HttpResponseNotFound(
            _('No previous export with id "{id}" found').format(id=previous_export_id))
    if not filename:
        filename = export_tag
    files = DefaultExportSchema(index=export_tag).get_export_files(
        format=format,
        previous_export_id=previous_export_id,
        filter=filter,
        use_cache=use_cache,
        max_column_size=max_column_size,
        separator=separator,
    )
    if files and files.checkpoint:
        return export_response(files.file, format, filename, files.checkpoint)
    return None
def tearDownClass(cls):
    """Delete every SavedExportSchema doc the tests created, then chain up."""
    doc_types = (SavedExportSchema.__name__,)
    delete_all_docs_by_doc_type(SavedExportSchema.get_db(), doc_types)
    super(SavedExportSchemaDBTest, cls).tearDownClass()
def tearDownClass(cls):
    """Class-level teardown: purge SavedExportSchema test docs, then defer to the parent."""
    db = SavedExportSchema.get_db()
    delete_all_docs_by_doc_type(db, (SavedExportSchema.__name__,))
    super(SavedExportSchemaDBTest, cls).tearDownClass()