def bulk_export_async(bulk_export_helper, download_id, filename="bulk_export",
                      expiry=10 * 60 * 60, domain=None):
    """Generate a (possibly zipped) bulk export and cache it for download.

    Args:
        bulk_export_helper: helper exposing ``bulk_files`` (each with
            ``export_objects``, ``generate_bulk_file`` and ``filename``)
            and a ``zip_export`` flag.
        download_id: key under which the finished file is cached.
        filename: base name for the download; prefixed with ``domain``
            when producing a zip.
        expiry: cache lifetime in seconds (default 10 hours).
        domain: optional domain name used to prefix the zip filename.

    Returns:
        The result of ``cache_file_to_be_served`` for the generated file.
    """
    # +1 so progress can be bumped immediately, before any file is generated.
    total = sum(len(f.export_objects) for f in bulk_export_helper.bulk_files) + 1

    def _update_progress(progress):
        DownloadBase.set_progress(bulk_export_async, progress, total)

    # Give the user some feedback that something is happening.
    _update_progress(1)

    if bulk_export_helper.zip_export:
        filename = "%s_%s" % (domain, filename) if domain else filename
        fd, path = tempfile.mkstemp()
        # ZipFile reopens the path itself; we only needed the unique name.
        os.close(fd)
        zf = zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED)
        try:
            for file in bulk_export_helper.bulk_files:
                try:
                    bulk = Temp(file.generate_bulk_file(
                        update_progress=_update_progress))
                    zf.write(bulk.path, "%s/%s" % (filename, file.filename))
                except Exception as e:
                    # Best-effort: skip a failed member rather than abort the
                    # whole archive, but record the failure.
                    logging.exception(
                        "FAILED to add file to bulk export archive. %s" % e)
        finally:
            zf.close()
        try:
            return cache_file_to_be_served(
                tmp=Temp(path),
                checkpoint=bulk_export_helper,
                download_id=download_id,
                filename=filename,
                format='zip',
                expiry=expiry)
        finally:
            try:
                os.remove(path)
            except OSError:
                # The file has already been removed.
                pass
    else:
        # Single-file export: serve the one bulk file directly.
        export_object = bulk_export_helper.bulk_files[0]
        return cache_file_to_be_served(
            tmp=Temp(export_object.generate_bulk_file(
                update_progress=_update_progress)),
            checkpoint=bulk_export_helper,
            download_id=download_id,
            filename=export_object.filename,
            format=export_object.format,
            expiry=expiry)
class ExportFile(object):
    """Context manager wrapping a temporary export file.

    Essentially copied from couchexport.files.ExportFiles: entering the
    context yields the file payload, leaving it deletes the temp file.
    """

    def __init__(self, path, format):
        self.file = Temp(path)
        self.format = format

    def __enter__(self):
        # Hand the caller the underlying payload, not the Temp wrapper.
        return self.file.payload

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always clean up the temp file, success or failure.
        self.file.delete()
def export_data_source(request, domain, config_id):
    """Export the rows of a configurable data source as a tabular file.

    Applies the caller's URL filters to the data source query and streams
    the result through ``export_from_tables``. Returns a 400 response when
    the query parameters are invalid or the query itself fails.
    """
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    adapter = IndicatorSqlAdapter(config)
    query = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    query = query.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        query = query.filter(sql_filter)

    # Build the export generator: header row first, then the query results.
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            export_from_tables([[config.table_id, get_table(query)]],
                               tmpfile, params.format)
        except exc.DataError:
            msg = _("There was a problem executing your query, please make "
                    "sure your parameters are valid.")
            return HttpResponse(msg, status=400)
    return export_response(Temp(path), params.format, config.display_name)
def bulk_export_async(bulk_export_helper, download_id, filename="bulk_export",
                      expiry=10 * 60 * 60, domain=None):
    """Generate a (possibly zipped) bulk export and cache it for download.

    Args:
        bulk_export_helper: helper exposing ``bulk_files`` (each with
            ``generate_bulk_file`` and ``filename``) and a ``zip_export`` flag.
        download_id: key under which the finished file is cached.
        filename: base name for the download; prefixed with ``domain``
            when producing a zip.
        expiry: cache lifetime in seconds (default 10 hours).
        domain: optional domain name used to prefix the zip filename.

    Returns:
        The result of ``cache_file_to_be_served`` for the generated file.
    """
    if bulk_export_helper.zip_export:
        filename = "%s_%s" % (domain, filename) if domain else filename
        fd, path = tempfile.mkstemp()
        # ZipFile reopens the path itself; we only needed the unique name.
        os.close(fd)
        try:
            with zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zf:
                for file in bulk_export_helper.bulk_files:
                    try:
                        bulk = Temp(file.generate_bulk_file())
                        zf.write(bulk.path, "%s/%s" % (filename, file.filename))
                    except Exception as e:
                        # Best-effort: skip a failed member rather than abort
                        # the whole archive, but record the failure.
                        logging.exception(
                            "FAILED to add file to bulk export archive. %s" % e)
            return cache_file_to_be_served(
                tmp=Temp(path),
                checkpoint=bulk_export_helper,
                download_id=download_id,
                filename=filename,
                format='zip',
                expiry=expiry)
        finally:
            # Remove the temp archive even if archiving itself raised
            # (previously only the success path cleaned up).
            try:
                os.remove(path)
            except OSError:
                # The file has already been removed.
                pass
    else:
        # Single-file export: serve the one bulk file directly.
        export_object = bulk_export_helper.bulk_files[0]
        return cache_file_to_be_served(
            tmp=Temp(export_object.generate_bulk_file()),
            checkpoint=bulk_export_helper,
            download_id=download_id,
            filename=export_object.filename,
            format=export_object.format,
            expiry=expiry)
def export_data_source(request, domain, config_id):
    """Export a configurable data source's rows in the requested format.

    Validates the requested output format, applies the caller's URL filters,
    redirects oversized XLS exports to the configurable-export view, and
    otherwise streams the result through ``export_from_tables``. Returns a
    400 response on invalid parameters or a failed query.
    """
    config, _ = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    query = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
        allowed_formats = [
            Format.CSV,
            Format.HTML,
            Format.XLS,
            Format.XLS_2007,
        ]
        if params.format not in allowed_formats:
            msg = ugettext_lazy(
                'format must be one of the following: {}').format(
                    ', '.join(allowed_formats))
            return HttpResponse(msg, status=400)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    query = query.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        query = query.filter(sql_filter)

    # xls format has a limit of 65536 rows; the first row is taken up by
    # headers, so redirect anything at or above 65535 data rows.
    if params.format == Format.XLS and query.count() >= 65535:
        keyword_params = dict(**request.GET)
        # Drop the explicit format so the redirected view uses its default.
        if 'format' in keyword_params:
            del keyword_params['format']
        return HttpResponseRedirect('%s?%s' % (
            reverse('export_configurable_data_source',
                    args=[domain, config._id]),
            urlencode(keyword_params)))

    # Build the export generator: header row first, then the query results.
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            export_from_tables([[config.table_id, get_table(query)]],
                               tmpfile, params.format)
        except exc.DataError:
            msg = ugettext_lazy("There was a problem executing your query, "
                                "please make sure your parameters are valid.")
            return HttpResponse(msg, status=400)
    return export_response(Temp(path), params.format, config.display_name)
def create_metadata_export(download_id, domain, format, filename,
                           datespan=None, user_ids=None):
    """Build the form-metadata export for *domain* and cache it for download.

    The heavy lifting is done by ``save_metadata_export_to_tempfile``; this
    wrapper just hands the resulting temp file to the download cache.
    """
    tmp_path = save_metadata_export_to_tempfile(domain, format, datespan,
                                                user_ids)

    class FakeCheckpoint(object):
        # For some silly reason the export cache function wants an object
        # that looks like this, so hack around it with this stub class
        # rather than do a larger rewrite.
        def __init__(self, domain):
            self.domain = domain

        @property
        def get_id(self):
            return '%s-form-metadata' % self.domain

    return cache_file_to_be_served(Temp(tmp_path), FakeCheckpoint(domain),
                                   download_id, format, filename)
def __init__(self, path, format):
    """Wrap the file at *path* in a Temp handle and record its *format*."""
    self.format = format
    self.file = Temp(path)