def cache_file_to_be_served(tmp, checkpoint, download_id, format=None, filename=None, expiry=10*60*60):
    """Cache an export result (or a "no data" page) under ``download_id``.

    tmp can be either a path to a tempfile or a StringIO
    (the APIs for tempfiles vs StringIO are unfortunately... not similar)
    NOTE(review): the code below calls ``tmp.payload`` and ``tmp.delete()``,
    which matches neither a plain path nor a StringIO -- looks like ``tmp``
    is actually a file-reference wrapper object; confirm against callers.

    :param checkpoint: export checkpoint document; falsy means the export
        produced no data, in which case an HTML apology page is cached.
    :param expiry: cache lifetime in seconds (default ten hours).
    """
    if checkpoint:
        format = Format.from_format(format)
        try:
            # best-effort ASCII transliteration of the filename; on any
            # failure keep the original name unchanged
            filename = unidecode(filename)
        except Exception:
            pass
        escaped_filename = escape_quotes('%s.%s' % (filename, format.extension))
        payload = tmp.payload
        expose_cached_download(payload, expiry, ".{}".format(format.extension),
                               mimetype=format.mimetype,
                               content_disposition='attachment; filename="%s"' % escaped_filename,
                               extras={'X-CommCareHQ-Export-Token': checkpoint.get_id},
                               download_id=download_id)
        tmp.delete()
    else:
        # this just gives you a link saying there wasn't anything there
        expose_cached_download("Sorry, there wasn't any data.", expiry, None,
                               content_disposition="",
                               mimetype="text/html",
                               download_id=download_id).save(expiry)
def export_async(custom_export, download_id, format=None, filename=None, **kwargs):
    """Run a saved export asynchronously and cache the result for download.

    On a schema mismatch, schedule a schema rebuild and cache an error page
    instead; otherwise hand the generated files off to
    ``cache_file_to_be_served``.
    """
    try:
        export_files = custom_export.get_export_files(format=format, process=export_async, **kwargs)
    except SchemaMismatchException as e:
        # fire off a delayed force update to prevent this from happening again
        rebuild_schemas.delay(custom_export.index)
        expiry = 10 * 60 * 60
        expose_cached_download(
            "Sorry, the export failed for %s, please try again later" % custom_export._id,
            expiry,
            None,
            content_disposition="",
            mimetype="text/html",
            download_id=download_id).save(expiry)
    else:
        if not export_files:
            return cache_file_to_be_served(None, None, download_id, format, filename)
        if export_files.format is not None:
            format = export_files.format
        if not filename:
            filename = custom_export.name
        return cache_file_to_be_served(
            export_files.file, export_files.checkpoint, download_id, format, filename)
def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60):
    """Generate an export file and expose it as a cached download."""
    # We don't have a great way to calculate progress if it's a bulk download,
    # so only track the progress for single instance exports.
    tracker = populate_export_download_task if len(export_instances) == 1 else None
    export_file = get_export_file(export_instances, filters, progress_tracker=tracker)
    fmt = Format.from_format(export_file.format)
    if not filename:
        filename = export_instances[0].name
    expose_cached_download(
        export_file.file.payload,
        expiry,
        ".{}".format(fmt.extension),
        mimetype=fmt.mimetype,
        content_disposition=safe_filename_header(filename, fmt.extension),
        download_id=download_id,
    )
    export_file.file.delete()
def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60):
    """Generate an export file and expose it as a cached download.

    :param expiry: cache lifetime in seconds (default ten hours).
    """
    export_file = get_export_file(
        export_instances,
        filters,
        # We don't have a great way to calculate progress if it's a bulk download,
        # so only track the progress for single instance exports.
        progress_tracker=populate_export_download_task if len(export_instances) == 1 else None)
    file_format = Format.from_format(export_file.format)
    filename = filename or export_instances[0].name
    escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension))
    # NOTE(review): ``urllib.quote`` exists only on Python 2; on Python 3 this
    # would need ``urllib.parse.quote`` -- confirm the runtime this targets.
    escaped_filename = urllib.quote(escaped_filename.encode('utf8'))
    payload = export_file.file.payload
    expose_cached_download(
        payload,
        expiry,
        ".{}".format(file_format.extension),
        mimetype=file_format.mimetype,
        content_disposition='attachment; filename="%s"' % escaped_filename,
        download_id=download_id,
    )
    export_file.file.delete()
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip"):
    """Build a zip of an application's files and expose it for download.

    Uses the shared transfer drive when enabled (and reuses an existing
    file there), otherwise builds into a tempfile and caches it.

    :returns: dict with an ``errors`` list from file iteration.
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = os.path.join(
            settings.SHARED_DRIVE_CONF.transfer_dir,
            "{}{}{}{}{}".format(app._id,
                                'mm' if include_multimedia_files else '',
                                'ccz' if include_index_files else '',
                                app.version,
                                build_profile_id))
    else:
        _, fpath = tempfile.mkstemp()
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors = iter_app_files(app, include_multimedia_files, include_index_files, build_profile_id)
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
    common_kwargs = dict(
        mimetype='application/zip' if compress_zip else 'application/x-zip-compressed',
        content_disposition='attachment; filename="{fname}"'.format(fname=filename),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        # FIX: open the zip in binary mode -- text mode corrupts the payload
        # on Python 3 (and on Windows under Python 2).
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs)
    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip"):
    """Build a zip of an application's files and expose it for download.

    Reuses a previously built file on the shared transfer drive when
    available; otherwise writes to a tempfile and caches the download.

    :returns: dict with an ``errors`` list from file iteration.
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, "{}{}{}{}{}".format(
            app._id,
            'mm' if include_multimedia_files else '',
            'ccz' if include_index_files else '',
            app.version,
            build_profile_id
        ))
    else:
        _, fpath = tempfile.mkstemp()
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors = iter_app_files(app, include_multimedia_files, include_index_files, build_profile_id)
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
    common_kwargs = dict(
        mimetype='application/zip' if compress_zip else 'application/x-zip-compressed',
        content_disposition='attachment; filename="{fname}"'.format(fname=filename),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        # FIX: open the zip in binary mode -- text mode corrupts the payload
        # on Python 3 (and on Windows under Python 2).
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs
        )
    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
def _expose_download_link(fpath, filename, compress_zip, download_id):
    """Register the built zip at ``fpath`` for download.

    Prefers the shared transfer drive when enabled; otherwise caches the
    file contents for one hour.
    """
    shared_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': 60 * 60,
    }
    if not settings.SHARED_DRIVE_CONF.transfer_enabled:
        expose_cached_download(FileWrapper(open(fpath, 'rb')),
                               file_extension=file_extention_from_filename(filename),
                               **shared_kwargs)
    else:
        expose_file_download(fpath, use_transfer=True, **shared_kwargs)
def prepare_download(download_id, payload_func, content_disposition, content_type, expiry=10*60*60):
    """Build a payload and cache it for download.

    payload_func should be an instance of SerializableFunction, and can
    return either a string or a FileWrapper object.
    """
    try:
        payload = payload_func(process=prepare_download)
    except TypeError:
        # payload_func does not accept a ``process`` keyword
        payload = payload_func()
    expose_cached_download(payload, expiry, None,
                           mimetype=content_type,
                           content_disposition=content_disposition,
                           download_id=download_id)
def export_async(custom_export, download_id, format=None, filename=None, **kwargs):
    """Run a saved export asynchronously; on schema mismatch, schedule a
    rebuild and cache an error page under ``download_id``.
    """
    try:
        export_files = custom_export.get_export_files(format=format, process=export_async, **kwargs)
    # FIX: ``except SchemaMismatchException, e`` is a syntax error on
    # Python 3 -- use the ``as`` form.
    except SchemaMismatchException as e:
        # fire off a delayed force update to prevent this from happening again
        rebuild_schemas.delay(custom_export.index)
        expiry = 10 * 60 * 60
        expose_cached_download(
            "Sorry, the export failed for %s, please try again later" % custom_export._id,
            expiry,
            None,
            content_disposition="",
            mimetype="text/html",
            download_id=download_id,
        ).save(expiry)
def process_upload(self):
    """Stash the uploaded archive and kick off async bulk-zip processing.

    Uses the shared NFS temp dir when the upload landed on disk and the
    drive is configured; otherwise stores the bytes in the download cache.
    """
    on_disk = hasattr(self.uploaded_file, 'temporary_file_path')
    if on_disk and settings.SHARED_DRIVE_CONF.temp_dir:
        processing_id = uuid.uuid4().hex
        path = settings.SHARED_DRIVE_CONF.get_temp_file(suffix='.upload')
        shutil.move(self.uploaded_file.temporary_file_path(), path)
        status = BulkMultimediaStatusCacheNfs(processing_id, path)
    else:
        self.uploaded_file.file.seek(0)
        saved_file = expose_cached_download(
            self.uploaded_file.file.read(),
            expiry=BulkMultimediaStatusCache.cache_expiry,
            file_extension=file_extention_from_filename(self.uploaded_file.name),
        )
        processing_id = saved_file.download_id
        status = BulkMultimediaStatusCache(processing_id)
    status.save()
    process_bulk_upload_zip.delay(processing_id, self.domain, self.app_id,
                                  username=self.username,
                                  share_media=self.share_media,
                                  license_name=self.license_used,
                                  author=self.author,
                                  attribution_notes=self.attribution_notes)
    return status.get_response()
def post(self, request, *args, **kwargs):
    """Validate the uploaded product sheet and queue the async import."""
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not upload.name.endswith('.xlsx'):
        messages.error(request, _('please use xlsx format only'))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    file_ref = expose_cached_download(
        upload.read(),
        expiry=60 * 60,
        file_extension=file_extention_from_filename(upload.name)
    )
    queued = import_products_async.delay(domain, file_ref.download_id)
    file_ref.set_task(queued)
    status_url = reverse(ProductImportStatusView.urlname,
                         args=[domain, file_ref.download_id])
    return HttpResponseRedirect(status_url)
def post(self, request, *args, **kwargs):
    """Queue bulk form management for the posted form ids.

    Rejects the request when the user's location access does not cover
    all of the requested forms.
    """
    form_ids = self.get_xform_ids(request)
    if not self.request.can_access_all_locations:
        blocked = self.inaccessible_forms_accessed(
            form_ids, self.domain, request.couch_user)
        if blocked:
            return HttpResponseBadRequest(
                "Inaccessible forms accessed. Id(s): %s " % ','.join(blocked))
    mode = self.request.POST.get('mode')
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    queued = bulk_form_management_async.delay(
        mode, self.domain, self.request.couch_user, form_ids)
    task_ref.set_task(queued)
    status_url = reverse(XFormManagementStatusView.urlname,
                         args=[self.domain, mode, task_ref.download_id])
    return HttpResponseRedirect(status_url)
def post(self, request, *args, **kwargs):
    """Validate the uploaded locations workbook and queue the async import."""
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    xlsx_mime = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    if upload.content_type != xlsx_mime:
        messages.error(
            request,
            _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    hour_in_seconds = 60 * 60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=hour_in_seconds,
        file_extension=file_extention_from_filename(upload.name),
    )
    queued = import_locations_async.delay(domain, file_ref.download_id)
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, hour_in_seconds)
    file_ref.set_task(queued)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id]))
def prepare_fixture_download(table_ids, domain, task, download_id):
    """Prepare fixture data for Excel download.

    Builds one "types" sheet plus a sheet per data type and caches the
    resulting workbook for two hours under ``download_id``.
    """
    data_types_book, excel_sheets = _prepare_fixture(table_ids, domain, task=task)
    header_groups = [("types", excel_sheets["types"]["headers"])]
    value_groups = [("types", excel_sheets["types"]["rows"])]
    for data_type in data_types_book:
        header_groups.append(
            (data_type.tag, excel_sheets[data_type.tag]["headers"]))
        value_groups.append(
            (data_type.tag, excel_sheets[data_type.tag]["rows"]))
    # renamed locals: ``file`` and ``format`` shadowed the builtins
    workbook_buffer = StringIO()
    excel_format = Format.XLS_2007
    export_raw(tuple(header_groups), tuple(value_groups), workbook_buffer, excel_format)
    return expose_cached_download(
        workbook_buffer.getvalue(),
        60 * 60 * 2,
        file_extension=".xlsx",
        mimetype=Format.from_format(excel_format).mimetype,
        content_disposition='attachment; filename="%s_lookup-tables.xlsx"' % domain,
        download_id=download_id,
    )
def post(self, request):
    """Stash the uploaded fixture file, validate its format synchronously,
    then queue the async upload task."""
    replace = 'replace' in request.POST
    file_ref = expose_cached_download(
        request.file.read(),
        file_extension=file_extention_from_filename(request.file.name),
        expiry=60 * 60,
    )
    # catch basic validation in the synchronous UI
    try:
        validate_fixture_file_format(file_ref.get_filename())
    except FixtureUploadError as e:
        error_html = '<ul><li>{}</li></ul>'.format('</li><li>'.join(e.errors))
        messages.error(
            request,
            _(u'Please fix the following formatting issues in your excel file: %s') % error_html,
            extra_tags='html'
        )
        return HttpResponseRedirect(fixtures_home(self.domain))
    # hand off to async
    queued = fixture_upload_async.delay(self.domain, file_ref.download_id, replace)
    file_ref.set_task(queued)
    return HttpResponseRedirect(
        reverse(FixtureUploadStatusView.urlname,
                args=[self.domain, file_ref.download_id]))
def _schedule_task_without_flag(request, domain, action):
    """Create a soil task reference and queue the bulk operate-on-payloads
    task for the records selected in the request."""
    selected = _get_records(request)
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    queued = task_operate_on_payloads.delay(selected, domain, action)
    task_ref.set_task(queued)
def post(self, request):
    """Stash the uploaded fixture file, validate its format synchronously,
    then queue the async upload task."""
    replace = 'replace' in request.POST
    file_ref = expose_cached_download(
        request.file.read(),
        file_extension=file_extention_from_filename(request.file.name),
        expiry=60 * 60,
    )
    # catch basic validation in the synchronous UI
    try:
        validate_fixture_file_format(file_ref.get_filename())
    except FixtureUploadError as e:
        error_html = '<ul><li>{}</li></ul>'.format('</li><li>'.join(e.errors))
        messages.error(
            request,
            _('Please fix the following formatting issues in your Excel file: %s') % error_html,
            extra_tags='html'
        )
        return HttpResponseRedirect(fixtures_home(self.domain))
    # hand off to async
    queued = fixture_upload_async.delay(self.domain, file_ref.download_id, replace)
    file_ref.set_task(queued)
    return HttpResponseRedirect(
        reverse(FixtureUploadStatusView.urlname,
                args=[self.domain, file_ref.download_id]))
def send_monthly_sms_report():
    """Email last month's ICDS SMS usage report to the fixed recipient list.

    Generates the report via the ``get_icds_sms_usage`` management command,
    caches it for 24 hours, and mails a download link. On any failure an
    error notice is mailed instead and the exception is re-raised.
    """
    subject = _('Monthly SMS report')
    recipients = ['*****@*****.**', '*****@*****.**', '*****@*****.**',
                  '*****@*****.**', '*****@*****.**', '*****@*****.**']
    try:
        # first and last day of the previous calendar month
        start_date = date.today().replace(day=1) - relativedelta(months=1)
        first_day, last_day = calendar.monthrange(start_date.year, start_date.month)
        end_date = start_date.replace(day=last_day)
        filename = call_command('get_icds_sms_usage', 'icds-cas', str(start_date), str(end_date))
        with open(filename, 'rb') as f:
            # downloadable for 24 hours ("till midnight today" per the email)
            cached_download = expose_cached_download(
                f.read(),
                expiry=24 * 60 * 60,
                file_extension=file_extention_from_filename(filename),
                mimetype=Format.from_format(Format.XLS_2007).mimetype,
                content_disposition='attachment; filename="%s"' % filename)
        path = reverse('retrieve_download', kwargs={'download_id': cached_download.download_id})
        link = f"{web.get_url_base()}{path}?get_file"
        message = _(""" Hi, Please download the sms report for last month at {link}. The report is available only till midnight today. """).format(link=link)
        send_html_email_async.delay(subject, recipients, message,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
    except Exception as e:
        message = _(""" Hi, Could not generate the montly SMS report for ICDS. The error has been notified. Please report as an issue for quick followup """)
        send_html_email_async.delay(subject, recipients, message,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
        raise e
def post(self, request, *args, **kwargs):
    """Validate the uploaded locations workbook and queue the async import."""
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    xlsx_mime = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    if upload.content_type != xlsx_mime:
        messages.error(request, _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    hour_in_seconds = 60 * 60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=hour_in_seconds,
        file_extension=file_extention_from_filename(upload.name),
    )
    queued = import_locations_async.delay(domain, file_ref.download_id)
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, hour_in_seconds)
    file_ref.set_task(queued)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id]))
def post(self, request, *args, **kwargs):
    """Validate the uploaded locations workbook and queue the async import
    with a delay so the request's location lock is released first."""
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    xlsx_mime = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    if upload.content_type != xlsx_mime:
        messages.error(request, _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    ten_hours = 10 * 60 * 60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=ten_hours,
        file_extension=file_extention_from_filename(upload.name),
    )
    # We need to start this task after this current request finishes because this
    # request uses the lock_locations decorator which acquires the same lock that
    # the task will try to acquire.
    queued = import_locations_async.apply_async(
        args=[domain, file_ref.download_id], countdown=10)
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, ten_hours)
    file_ref.set_task(queued)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id]))
def post(self, request):
    """Stash the uploaded fixture file, validate it synchronously, then
    queue the async upload task."""
    replace = 'replace' in request.POST
    file_ref = expose_cached_download(request.file.read(), expiry=60 * 60)
    # catch basic validation in the synchronous UI
    try:
        validate_file_format(file_ref.get_filename())
    except (FixtureUploadError, JSONReaderError, HeaderValueError) as e:
        messages.error(request, _(u'Upload unsuccessful: %s') % e)
        return HttpResponseRedirect(fixtures_home(self.domain))
    # hand off to async
    queued = fixture_upload_async.delay(self.domain, file_ref.download_id, replace)
    file_ref.set_task(queued)
    return HttpResponseRedirect(
        reverse(FixtureUploadStatusView.urlname,
                args=[self.domain, file_ref.download_id]))
def test_no_auth_needed(self):
    """A download with no owner restriction is retrievable anonymously."""
    download = expose_cached_download(BytesIO(b'content'), expiry=60, file_extension='txt')
    url = reverse('retrieve_download', args=[download.download_id]) + "?get_file"
    response = self.client.get(url)
    self.assertEqual(response.content, b'content')
def post(self, request, *args, **kwargs):
    """Stash the uploaded locations file and queue the async import."""
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    file_ref = expose_cached_download(upload.read(), expiry=60 * 60)
    queued = import_locations_async.delay(domain, file_ref.download_id)
    file_ref.set_task(queued)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id]))
def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60):
    """Generate the export and cache it for download under ``download_id``."""
    export_file = get_export_file(export_instances, filters)
    fmt = Format.from_format(export_file.format)
    download_name = filename or export_instances[0].name
    escaped = escape_quotes('%s.%s' % (download_name, fmt.extension))
    expose_cached_download(
        export_file.file.payload,
        expiry,
        ".{}".format(fmt.extension),
        mimetype=fmt.mimetype,
        content_disposition='attachment; filename="%s"' % escaped,
        download_id=download_id,
    )
    export_file.file.delete()
def _schedule_task_with_flag(request, domain, action):
    """Parse the repeat-record query (if any) and queue the
    generate-ids-and-operate task."""
    query = _get_query(request)
    data = None
    if query:
        data = _url_parameters_to_dict(six.moves.urllib.parse.unquote(query))
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    queued = task_generate_ids_and_operate_on_payloads.delay(data, domain, action)
    task_ref.set_task(queued)
def _schedule_task_with_flag(request, domain, action):
    """Queue the generate-ids-and-operate task for an optional
    payload/repeater filter taken from the POST data."""
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    payload_id = request.POST.get('payload_id') or None
    repeater_id = request.POST.get('repeater') or None
    queued = task_generate_ids_and_operate_on_payloads.delay(
        payload_id, repeater_id, domain, action)
    task_ref.set_task(queued)
def _expose_download(fpath, use_transfer, zip_name, download_id, num_forms):
    """Expose the built multimedia zip for download and mark the task done.

    Uses the transfer drive when enabled; otherwise caches the file for
    one hour.
    """
    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(
            fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        # FIX: open the zip in binary mode -- text mode corrupts the payload
        # on Python 3 (and on Windows under Python 2).
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs)
    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def test_user_auth_required_access_allowed(self):
    """A download owned by the logged-in user is retrievable."""
    download = expose_cached_download(
        BytesIO(b'content'),
        expiry=60,
        file_extension='txt',
        owner_ids=[self.couch_user.get_id],
    )
    url = reverse('retrieve_download', args=[download.download_id]) + "?get_file"
    response = self.client.get(url)
    self.assertEqual(response.content, b'content')
def test_user_auth_required_access_denied(self):
    """A download owned by someone else returns 403 Forbidden."""
    download = expose_cached_download(
        BytesIO(b'content'),
        expiry=60,
        file_extension='txt',
        owner_ids=['foo'],
    )
    url = reverse('retrieve_download', args=[download.download_id]) + "?get_file"
    response = self.client.get(url)
    self.assertEqual(response.status_code, 403)
def _schedule_task_without_flag(
    request: HttpRequest,
    domain: str,
    action,  # type: Literal['resend', 'cancel', 'requeue']  # 3.8+
    use_sql: bool,
):
    """Queue the operate-on-payloads task for the record ids posted in the
    request, tracked via a soil download reference."""
    record_ids = _get_record_ids_from_request(request)
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    queued = task_operate_on_payloads.delay(record_ids, domain, action, use_sql)
    task_ref.set_task(queued)
def post(self, request, *args, **kwargs):
    # Bulk user upload: parse the workbook, pull out the users/groups/
    # locations sheets, validate headers, then queue the async bulk upload
    # and redirect to its status page.
    upload = request.FILES.get("bulk_upload_file")
    """View's dispatch method automatically calls this"""
    try:
        self.workbook = WorkbookJSONReader(upload)
    except InvalidFileException:
        try:
            # If the payload parses as ASCII CSV, return a targeted
            # "CSV is no longer supported" error instead of a generic one.
            csv.DictReader(io.StringIO(upload.read().decode("ascii"), newline=None))
            return HttpResponseBadRequest(
                "CommCare HQ no longer supports CSV upload. "
                "Please convert to Excel 2007 or higher (.xlsx) "
                "and try again."
            )
        except UnicodeDecodeError:
            return HttpResponseBadRequest("Unrecognized format")
    except JSONReaderError as e:
        # NOTE(review): ``e.message`` is Python 2 only -- on Python 3 this
        # raises AttributeError; use str(e). Confirm target runtime.
        messages.error(request, "Your upload was unsuccessful. %s" % e.message)
        return self.get(request, *args, **kwargs)
    except HeaderValueError as e:
        return HttpResponseBadRequest("Upload encountered a data type error: %s" % e.message)
    try:
        self.user_specs = self.workbook.get_worksheet(title="users")
    except WorksheetNotFound:
        try:
            # fall back to the first worksheet when there is no "users" sheet
            self.user_specs = self.workbook.get_worksheet()
        except WorksheetNotFound:
            return HttpResponseBadRequest("Workbook has no worksheets")
    try:
        self.group_specs = self.workbook.get_worksheet(title="groups")
    except WorksheetNotFound:
        self.group_specs = []
    self.location_specs = []
    if Domain.get_by_name(self.domain).commtrack_enabled:
        try:
            self.location_specs = self.workbook.get_worksheet(title="locations")
        except WorksheetNotFound:
            # if there is no sheet for locations (since this was added
            # later and is optional) we don't error
            pass
    try:
        check_headers(self.user_specs)
    except UserUploadError as e:
        return HttpResponseBadRequest(e)
    # track the celery task via a soil download reference (1 hour expiry)
    task_ref = expose_cached_download(None, expiry=1 * 60 * 60)
    task = bulk_upload_async.delay(
        self.domain,
        list(self.user_specs),
        list(self.group_specs),
        list(self.location_specs)
    )
    task_ref.set_task(task)
    return HttpResponseRedirect(reverse(UserUploadStatusView.urlname, args=[self.domain, task_ref.download_id]))
def export_async(custom_export, download_id, format=None, filename=None, **kwargs):
    """Run a saved export asynchronously; on schema mismatch, schedule a
    rebuild and cache an error page under ``download_id``.
    """
    try:
        export_files = custom_export.get_export_files(format=format, process=export_async, **kwargs)
    # FIX: ``except SchemaMismatchException, e`` is a syntax error on
    # Python 3 -- use the ``as`` form.
    except SchemaMismatchException as e:
        # fire off a delayed force update to prevent this from happening again
        rebuild_schemas.delay(custom_export.index)
        expiry = 10 * 60 * 60
        expose_cached_download(
            "Sorry, the export failed for %s, please try again later" % custom_export._id,
            expiry,
            None,
            content_disposition="",
            mimetype="text/html",
            download_id=download_id).save(expiry)
def _schedule_task_with_flag(
    request: HttpRequest,
    domain: str,
    action,  # type: Literal['resend', 'cancel', 'requeue']  # 3.8+
    use_sql: bool,
):
    """Queue the generate-ids-and-operate task for an optional
    payload/repeater filter taken from the POST data."""
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    payload_id = request.POST.get('payload_id') or None
    repeater_id = request.POST.get('repeater') or None
    queued = task_generate_ids_and_operate_on_payloads.delay(
        payload_id, repeater_id, domain, action, use_sql)
    task_ref.set_task(queued)
def _expose_download(fpath, use_transfer, zip_name, download_id, num_forms):
    """Expose the built multimedia zip for download and mark the task done.

    Uses the transfer drive when enabled; otherwise caches the file for
    one hour.
    """
    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        # FIX: open the zip in binary mode -- text mode corrupts the payload
        # on Python 3 (and on Windows under Python 2).
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs
        )
    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def post(self, request, *args, **kwargs):
    """Queue bulk form management for the posted ids or query string and
    redirect to the task's status page."""
    form_ids_or_query_string = self.get_form_ids_or_query_string(request)
    mode = self.request.POST.get('mode')
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    queued = bulk_form_management_async.delay(
        mode, self.domain, self.request.couch_user, form_ids_or_query_string)
    task_ref.set_task(queued)
    status_url = reverse(XFormManagementStatusView.urlname,
                         args=[self.domain, mode, task_ref.download_id])
    return HttpResponseRedirect(status_url)
def _upload_fixture_api(request, domain):
    """Handle a fixture (lookup table) upload API request.

    Async requests stash the file in the download cache and queue a celery
    task; sync requests validate and process the file inline.
    """
    try:
        excel_file, replace, is_async = _get_fixture_upload_args_from_request(request, domain)
    except FixtureAPIRequestError as e:
        return UploadFixtureAPIResponse('fail', six.text_type(e))
    with excel_file as filename:
        if is_async:
            # FIX: read the workbook in binary mode -- .xlsx is a zip
            # archive and text mode corrupts it on Python 3.
            with open(filename, 'rb') as f:
                file_ref = expose_cached_download(
                    f.read(),
                    file_extension=file_extention_from_filename(filename),
                    expiry=1 * 60 * 60,
                )
            download_id = file_ref.download_id
            task = fixture_upload_async.delay(
                domain,
                download_id,
                replace,
            )
            file_ref.set_task(task)
            status_url = "{}{}".format(
                get_url_base(),
                reverse('fixture_api_status', args=(domain, download_id))
            )
            curl_command = "curl -v --digest {} -u {}".format(
                status_url, request.user.username
            )
            return UploadFixtureAPIResponse('success', {
                "download_id": download_id,
                "status_url": status_url,
                "curl_command": curl_command,
                "message": _("File uploaded successfully.")
            })
        try:
            validate_fixture_file_format(filename)
        except FixtureUploadError as e:
            return UploadFixtureAPIResponse(
                'fail',
                _('Please fix the following formatting issues in your Excel file: %s')
                % '\n'.join(e.errors))
        result = upload_fixture_file(domain, filename, replace=replace)
        status = 'warning' if result.errors else 'success'
        return UploadFixtureAPIResponse(status, result.get_display_message())
def export_async(custom_export, download_id, format=None, filename=None, **kwargs):
    """Run a saved export asynchronously; cache either the generated file
    or an error page under ``download_id``."""
    try:
        export_files = custom_export.get_export_files(format=format, process=export_async, **kwargs)
    except SchemaMismatchException as e:
        # fire off a delayed force update to prevent this from happening again
        rebuild_schemas.delay(custom_export.index)
        expiry = 10 * 60 * 60
        expose_cached_download(
            "Sorry, the export failed for %s, please try again later" % custom_export._id,
            expiry,
            None,
            content_disposition="",
            mimetype="text/html",
            download_id=download_id
        ).save(expiry)
    else:
        if not export_files:
            return cache_file_to_be_served(None, None, download_id, format, filename)
        if export_files.format is not None:
            format = export_files.format
        if not filename:
            filename = custom_export.name
        return cache_file_to_be_served(
            export_files.file, export_files.checkpoint, download_id, format, filename)
def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60):
    """Generate an export file and expose it as a cached download.

    :param expiry: cache lifetime in seconds (default ten hours).
    """
    export_file = get_export_file(
        export_instances,
        filters,
        # We don't have a great way to calculate progress if it's a bulk download,
        # so only track the progress for single instance exports.
        progress_tracker=populate_export_download_task if len(export_instances) == 1 else None
    )
    file_format = Format.from_format(export_file.format)
    filename = filename or export_instances[0].name
    escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension))
    # NOTE(review): ``urllib.quote`` exists only on Python 2; on Python 3 this
    # would need ``urllib.parse.quote`` -- confirm the runtime this targets.
    escaped_filename = urllib.quote(escaped_filename.encode('utf8'))
    payload = export_file.file.payload
    expose_cached_download(
        payload,
        expiry,
        ".{}".format(file_format.extension),
        mimetype=file_format.mimetype,
        content_disposition='attachment; filename="%s"' % escaped_filename,
        download_id=download_id,
    )
    export_file.file.delete()
def _cache_file(request, domain, upload):
    """Stash in soil for ten hours to make it easier to pass to celery

    :returns: `LocationImportView.Ref` object that can be identified with
    `isinstance(rval, LocationImportView.Ref)` or an HTTP response
    generated by `lock_locations` (and guaranteed not to be
    `LocationImportView.Ref`) if the lock could not be acquired.
    """
    ten_hours = 10 * 60 * 60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=ten_hours,
        file_extension=file_extention_from_filename(upload.name),
    )
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, ten_hours)
    return LocationImportView.Ref(file_ref)
def post(self, request, *args, **kwargs):
    """Queue bulk form management for the posted ids or query string and
    redirect to the task's status page."""
    form_ids_or_query_string = self.get_form_ids_or_query_string(request)
    mode = self.request.POST.get('mode')
    task_ref = expose_cached_download(payload=None, expiry=60 * 60, file_extension=None)
    queued = bulk_form_management_async.delay(
        mode,
        self.domain,
        self.request.couch_user,
        form_ids_or_query_string
    )
    task_ref.set_task(queued)
    status_url = reverse(XFormManagementStatusView.urlname,
                         args=[self.domain, mode, task_ref.download_id])
    return HttpResponseRedirect(status_url)
def _upload_fixture_api(request, domain):
    """Handle a fixture (lookup table) upload API request.

    Async requests stash the file in the download cache (1 hour expiry) and
    queue a celery task, returning a status URL; sync requests validate and
    process the file inline.
    """
    try:
        excel_file, replace, is_async, skip_orm, email = _get_fixture_upload_args_from_request(
            request, domain)
    except FixtureAPIRequestError as e:
        return UploadFixtureAPIResponse('fail', str(e))
    with excel_file as filename:
        if is_async:
            # async path: cache the raw bytes and hand off to celery
            with open(filename, 'rb') as f:
                file_ref = expose_cached_download(
                    f.read(),
                    file_extension=file_extention_from_filename(filename),
                    expiry=1 * 60 * 60,
                )
                download_id = file_ref.download_id
                task = fixture_upload_async.delay(domain, download_id, replace, skip_orm,
                                                  user_email=email)
                file_ref.set_task(task)
                status_url = "{}{}".format(
                    get_url_base(),
                    reverse('fixture_api_status', args=(domain, download_id)))
                return AsyncUploadFixtureAPIResponse(
                    'success', _("File has been uploaded successfully and is queued for processing."),
                    download_id, status_url)
        # sync path: validate format first so errors surface immediately
        try:
            validate_fixture_file_format(filename)
        except FixtureUploadError as e:
            return UploadFixtureAPIResponse(
                'fail',
                _('Please fix the following formatting issues in your Excel file: %s')
                % '\n'.join(e.errors))
        result = upload_fixture_file(domain, filename, replace=replace)
        status = 'warning' if result.errors else 'success'
        return UploadFixtureAPIResponse(status, result.get_display_message())
def prepare_fixture_download(table_ids, domain, task, download_id):
    """Build the fixture Excel workbook and expose it as a cached download."""
    data_types_book, excel_sheets = _prepare_fixture(table_ids, domain, task=task)

    # The "types" sheet always comes first, followed by one sheet per data type.
    tags = ["types"] + [data_type.tag for data_type in data_types_book]
    header_groups = tuple((tag, excel_sheets[tag]["headers"]) for tag in tags)
    value_groups = tuple((tag, excel_sheets[tag]["rows"]) for tag in tags)

    workbook = StringIO()
    excel_format = Format.XLS_2007
    export_raw(header_groups, value_groups, workbook, excel_format)
    return expose_cached_download(
        workbook.getvalue(),
        60 * 60 * 2,
        mimetype=Format.from_format(excel_format).mimetype,
        content_disposition='attachment; filename="%s_fixtures.xlsx"' % domain,
        download_id=download_id,
    )
def excel_config(request, domain):
    """
    Step one of three.

    This is the initial post when the user uploads the excel file

    named_columns:
        Whether or not the first row of the excel sheet contains header
        strings for the columns. This defaults to True and should
        potentially not be an option as it is always used due to how
        important it is to see column headers in the rest of the importer.
    """
    if request.method != 'POST':
        return HttpResponseRedirect(base.ImportCases.get_url(domain=domain))

    if not request.FILES:
        return render_error(request, domain, 'Please choose an Excel file to import.')

    named_columns = request.POST.get('named_columns') == "on"
    uploaded_file_handle = request.FILES['file']

    extension = os.path.splitext(uploaded_file_handle.name)[1][1:].strip().lower()

    # NOTE: We may not always be able to reference files from subsequent
    # views if your worker changes, so we have to store it elsewhere
    # using the soil framework.
    if extension not in importer_util.ExcelFile.ALLOWED_EXTENSIONS:
        return render_error(
            request, domain,
            'The Excel file you chose could not be processed. '
            'Please check that it is saved as a Microsoft '
            'Excel 97/2000 .xls file.')

    # stash content in the default storage for subsequent views
    file_ref = expose_cached_download(
        uploaded_file_handle.read(),
        expiry=1*60*60,
        file_extension=file_extention_from_filename(uploaded_file_handle.name),
    )
    request.session[EXCEL_SESSION_ID] = file_ref.download_id

    spreadsheet = importer_util.get_spreadsheet(file_ref, named_columns)
    if not spreadsheet:
        return _spreadsheet_expired(request, domain)

    columns = spreadsheet.get_header_columns()
    row_count = spreadsheet.get_num_rows()

    if row_count == 0:
        return render_error(
            request, domain,
            'Your spreadsheet is empty. '
            'Please try again with a different spreadsheet.')

    # load types from all modules
    case_types_from_apps = []
    for row in ApplicationBase.view(
        'app_manager/types_by_module',
        reduce=True,
        group=True,
        startkey=[domain],
        endkey=[domain, {}]
    ).all():
        case_type = row['key'][1]
        if case_type not in case_types_from_apps:
            case_types_from_apps.append(case_type)

    # for this we just want cases that have data but aren't being used anymore.
    # BUGFIX: this used to be `filter(lambda ...)`, whose lazy result breaks
    # the `len()` calls below on Python 3; a list comprehension is equivalent
    # on Python 2 and correct on Python 3.
    case_types_from_cases = [
        case_type for case_type in get_case_types_for_domain(domain)
        if case_type not in case_types_from_apps
    ]

    if len(case_types_from_apps) == 0 and len(case_types_from_cases) == 0:
        return render_error(
            request, domain,
            'No cases have been submitted to this domain and there are no '
            'applications yet. You cannot import case details from an Excel '
            'file until you have existing cases or applications.'
        )

    return render(
        request,
        "importer/excel_config.html", {
            'named_columns': named_columns,
            'columns': columns,
            'case_types_from_cases': case_types_from_cases,
            'case_types_from_apps': case_types_from_apps,
            'domain': domain,
            'report': {
                'name': 'Import: Configuration'
            },
            'slug': base.ImportCases.slug
        }
    )
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    """Build an application zip (CCZ) and register it as a download.

    The archive is written to the shared transfer drive when
    ``settings.SHARED_DRIVE_CONF.transfer_enabled`` is set (and reused if it
    already exists there), otherwise to a fresh temp file.  The result is
    exposed under ``download_id`` via ``expose_file_download`` or
    ``expose_cached_download``.  Progress is reported on a 0-100 scale
    through ``DownloadBase.set_progress``.

    :raises Exception: with all error messages joined by tabs when the
        CAUTIOUS_MULTIMEDIA integrity check fails.
    """
    # Imported here, not at module level — presumably to avoid an import
    # cycle with the views module; TODO confirm.
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    initial_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache path on the shared drive, keyed on app id,
        # content flags, app version and build profile.
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, "{}{}{}{}{}".format(
            app._id,
            'mm' if include_multimedia_files else '',
            'ccz' if include_index_files else '',
            app.version,
            build_profile_id
        ))
        if download_targeted_version:
            fpath += '-targeted'
    else:
        # NOTE(review): mkstemp() returns an *open* OS-level file descriptor
        # as its first element; it is discarded here without being closed,
        # which leaks a file descriptor.
        dummy, fpath = tempfile.mkstemp()

    DownloadBase.set_progress(build_application_zip, initial_progress, 100)

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors, file_count = iter_app_files(
            app, include_multimedia_files, include_index_files, build_profile_id,
            download_targeted_version=download_targeted_version,
        )

        if toggles.CAUTIOUS_MULTIMEDIA.enabled(app.domain):
            # Embed a manifest.json recording the build arguments and the
            # full app JSON alongside the app files.
            manifest = json.dumps({
                'include_multimedia_files': include_multimedia_files,
                'include_index_files': include_index_files,
                'download_id': download_id,
                'build_profile_id': build_profile_id,
                'compress_zip': compress_zip,
                'filename': filename,
                'download_targeted_version': download_targeted_version,
                'app': app.to_json(),
            }, indent=4)
            files = itertools.chain(files, [('manifest.json', manifest)])

        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                progress = initial_progress
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
                    progress += file_progress / file_count
                    DownloadBase.set_progress(build_application_zip, progress, 100)

        # Integrity check that all media files present in media_suite.xml were added to the zip
        if include_multimedia_files and include_index_files and toggles.CAUTIOUS_MULTIMEDIA.enabled(app.domain):
            with open(fpath, 'rb') as tmp:
                with zipfile.ZipFile(tmp, "r") as z:
                    media_suites = [f for f in z.namelist() if re.search(r'\bmedia_suite.xml\b', f)]
                    if len(media_suites) != 1:
                        message = _('Could not identify media_suite.xml in CCZ')
                        errors.append(message)
                    else:
                        with z.open(media_suites[0]) as media_suite:
                            from corehq.apps.app_manager.xform import parse_xml
                            parsed = parse_xml(media_suite.read())
                            # Local media resources the suite expects to be present.
                            resources = {node.text for node in
                                         parsed.findall("media/resource/location[@authority='local']")}
                            names = z.namelist()
                            # Zip entries have no leading './'; strip it before comparing.
                            missing = [r for r in resources
                                       if re.sub(r'^\.\/', '', r) not in names]
                            errors += [_('Media file missing from CCZ: {}').format(r)
                                       for r in missing]

        if errors:
            # Discard the incomplete archive and surface all problems at once.
            os.remove(fpath)
            raise Exception('\t' + '\t'.join(errors))
    else:
        # Cached archive reused: skip straight past the file-building phase.
        DownloadBase.set_progress(build_application_zip, initial_progress + file_progress, 100)

    common_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': (1 * 60 * 60),
    }
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs
        )
    DownloadBase.set_progress(build_application_zip, 100, 100)
def post(self, request, *args, **kwargs):
    """View's dispatch method automatically calls this"""
    # Parse the uploaded workbook, rejecting legacy CSV and malformed files.
    upload = request.FILES.get('bulk_upload_file')
    try:
        self.workbook = WorkbookJSONReader(upload)
    except InvalidExcelFileException:
        try:
            # If the payload decodes as ASCII CSV, give a specific
            # "CSV no longer supported" message instead of a generic error.
            csv.DictReader(io.StringIO(upload.read().decode('ascii'),
                                       newline=None))
            return HttpResponseBadRequest(
                "CommCare HQ no longer supports CSV upload. "
                "Please convert to Excel 2007 or higher (.xlsx) "
                "and try again."
            )
        except UnicodeDecodeError:
            return HttpResponseBadRequest("Unrecognized format")
    except JSONReaderError as e:
        # NOTE(review): `e.message` is Python-2-only; on Python 3 this line
        # would raise AttributeError — confirm the runtime version.
        messages.error(request,
                       'Your upload was unsuccessful. %s' % e.message)
        return self.get(request, *args, **kwargs)
    except HeaderValueError as e:
        return HttpResponseBadRequest("Upload encountered a data type error: %s"
                                      % e.message)
    # Prefer a worksheet titled 'users'; otherwise fall back to the first one.
    try:
        self.user_specs = self.workbook.get_worksheet(title='users')
    except WorksheetNotFound:
        try:
            self.user_specs = self.workbook.get_worksheet()
        except WorksheetNotFound:
            return HttpResponseBadRequest("Workbook has no worksheets")
    # The 'groups' worksheet is optional.
    try:
        self.group_specs = self.workbook.get_worksheet(title='groups')
    except WorksheetNotFound:
        self.group_specs = []
    try:
        check_headers(self.user_specs)
    except UserUploadError as e:
        messages.error(request, _(e.message))
        return HttpResponseRedirect(reverse(UploadCommCareUsers.urlname,
                                            args=[self.domain]))
    # convert to list here because iterator destroys the row once it has
    # been read the first time
    self.user_specs = list(self.user_specs)
    # Usernames must be text; numeric cells are rejected with a per-row error.
    for user_spec in self.user_specs:
        try:
            user_spec['username'] = enforce_string_type(user_spec['username'])
        except StringTypeRequiredError:
            messages.error(
                request,
                _("Error: Expected username to be a Text type for username {0}")
                .format(user_spec['username'])
            )
            return HttpResponseRedirect(reverse(UploadCommCareUsers.urlname,
                                                args=[self.domain]))
    try:
        check_existing_usernames(self.user_specs, self.domain)
    except UserUploadError as e:
        messages.error(request, _(e.message))
        return HttpResponseRedirect(reverse(UploadCommCareUsers.urlname,
                                            args=[self.domain]))
    try:
        check_duplicate_usernames(self.user_specs)
    except UserUploadError as e:
        messages.error(request, _(e.message))
        return HttpResponseRedirect(reverse(UploadCommCareUsers.urlname,
                                            args=[self.domain]))
    # All validation passed: queue the bulk upload in celery, tracked via an
    # empty cached download that only carries the task handle.
    task_ref = expose_cached_download(payload=None, expiry=1*60*60, file_extension=None)
    task = bulk_upload_async.delay(
        self.domain,
        self.user_specs,
        list(self.group_specs),
    )
    task_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            UserUploadStatusView.urlname,
            args=[self.domain, task_ref.download_id]
        )
    )
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id, export_id,
                              zip_name, download_id):
    """Collect non-XML form attachments into a zip and expose it as a download.

    Form ids come from the couch "attachments/attachments" view keyed on
    (domain, app_id, xmlns) within [startdate, enddate].  When ``export_id``
    is given, attachments are filtered to question ids present in that export
    schema.  The zip goes to the shared transfer drive when enabled (and is
    reused if already present), otherwise to a temp file, and is registered
    under ``download_id``.  Progress is reported per form via DownloadBase.
    """

    def find_question_id(form, value):
        # Depth-first search of the form dict for the leaf equal to `value`;
        # returns the list of keys down to it, or None if not found.
        for k, v in form.iteritems():
            if isinstance(v, dict):
                ret = find_question_id(v, value)
                if ret:
                    return [k] + ret
            else:
                if v == value:
                    return [k]
        return None

    def filename(form_info, question_id, extension):
        # "<case names->name-question-user-formid<ext>", cases prefixed when present.
        fname = u"%s-%s-%s-%s%s"
        if form_info["cases"]:
            fname = u"-".join(form_info["cases"]) + u"-" + fname
        return fname % (form_info["name"],
                        unidecode(question_id),
                        form_info["user"],
                        form_info["id"],
                        extension)

    case_ids = set()

    # `case_ids=case_ids` deliberately binds the enclosing set as a default
    # argument so every call accumulates case ids into the same set.
    def extract_form_info(form, properties=None, case_ids=case_ids):
        unknown_number = 0
        meta = form["form"].get("meta", dict())
        # get case ids
        case_blocks = extract_case_blocks(form)
        cases = {c["@case_id"] for c in case_blocks}
        case_ids |= cases
        form_info = {
            "form": form,
            "attachments": list(),
            "name": form["form"].get("@name", "unknown form"),
            "user": meta.get("username", "unknown_user"),
            "cases": cases,
            "id": form["_id"],
        }
        for k, v in form["_attachments"].iteritems():
            # Skip the form XML itself; we only want multimedia attachments.
            if v["content_type"] == "text/xml":
                continue
            try:
                question_id = unicode(u"-".join(find_question_id(form["form"], k)))
            except TypeError:
                # find_question_id returned None — synthesize "unknownN".
                question_id = unicode(u"unknown" + unicode(unknown_number))
                unknown_number += 1
            if not properties or question_id in properties:
                extension = unicode(os.path.splitext(k)[1])
                form_info["attachments"].append(
                    {
                        "size": v["length"],
                        "name": k,
                        "question_id": question_id,
                        "extension": extension,
                        "timestamp": parse(form["received_on"]).timetuple(),
                    }
                )
        return form_info

    key = [domain, app_id, xmlns]
    form_ids = {
        f["id"] for f in XFormInstance.get_db().view(
            "attachments/attachments",
            start_key=key + [startdate],
            end_key=key + [enddate, {}],
            reduce=False
        )
    }
    properties = set()
    if export_id:
        schema = FormExportSchema.get(export_id)
        for table in schema.tables:
            # - in question id is replaced by . in excel exports
            properties |= {c.display.replace(".", "-") for c in table.columns}

    if not app_id:
        zip_name = "Unrelated Form"
    forms_info = list()
    for form in iter_docs(XFormInstance.get_db(), form_ids):
        # Fall back to the first form's name when no zip name was given.
        if not zip_name:
            zip_name = unidecode(form["form"].get("@name", "unknown form"))
        forms_info.append(extract_form_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    # get case names
    case_id_to_name = {c: c for c in case_ids}
    for case in iter_docs(CommCareCase.get_db(), case_ids):
        if case["name"]:
            case_id_to_name[case["_id"]] = case["name"]

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache filename derived from the query parameters.
        params = "_".join(map(str, [xmlns, startdate, enddate, export_id, num_forms]))
        fname = "{}-{}".format(app_id, hashlib.md5(params).hexdigest())
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, fname)
    else:
        # NOTE(review): mkstemp() returns an *open* fd that is discarded
        # without being closed — leaks a file descriptor.
        _, fpath = tempfile.mkstemp()

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        with open(fpath, "wb") as zfile:
            with zipfile.ZipFile(zfile, "w") as z:
                for form_number, form_info in enumerate(forms_info):
                    f = XFormInstance.wrap(form_info["form"])
                    # Map case ids to case names for use in attachment filenames.
                    form_info["cases"] = {case_id_to_name[case_id]
                                          for case_id in form_info["cases"]}
                    for a in form_info["attachments"]:
                        fname = filename(form_info, a["question_id"], a["extension"])
                        zi = zipfile.ZipInfo(fname, a["timestamp"])
                        z.writestr(zi,
                                   f.fetch_attachment(a["name"], stream=True).read(),
                                   zipfile.ZIP_STORED)
                    DownloadBase.set_progress(build_form_multimedia_zip,
                                              form_number + 1, num_forms)

    common_kwargs = dict(
        mimetype="application/zip",
        content_disposition='attachment; filename="{fname}.zip"'.format(fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        # NOTE(review): the zip is opened without 'rb' here (unlike the app-zip
        # builders) — binary content read in text mode; confirm intended.
        expose_cached_download(
            FileWrapper(open(fpath)),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs
        )

    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id, export_id,
                              zip_name, download_id):
    """Collect non-XML form attachments into a zip and expose it as a download.

    NOTE(review): a near-identical copy of this function (differing mainly in
    quote style) appears elsewhere in this file — likely a merge artifact;
    consider consolidating.

    Form ids come from the couch "attachments/attachments" view keyed on
    (domain, app_id, xmlns) within [startdate, enddate]; attachments are
    optionally filtered to question ids from the export schema `export_id`.
    The zip is written to the shared transfer drive when enabled (reused if
    already present) or to a temp file, and registered under `download_id`.
    """

    def find_question_id(form, value):
        # Depth-first search for the dict path whose leaf equals `value`;
        # returns the key path as a list, or None.
        for k, v in form.iteritems():
            if isinstance(v, dict):
                ret = find_question_id(v, value)
                if ret:
                    return [k] + ret
            else:
                if v == value:
                    return [k]
        return None

    def filename(form_info, question_id, extension):
        # "<case names->name-question-user-formid<ext>", cases prefixed when present.
        fname = u"%s-%s-%s-%s%s"
        if form_info['cases']:
            fname = u'-'.join(form_info['cases']) + u'-' + fname
        return fname % (form_info['name'],
                        unidecode(question_id),
                        form_info['user'],
                        form_info['id'],
                        extension)

    case_ids = set()

    # `case_ids=case_ids` deliberately binds the enclosing set as a default
    # argument so every call accumulates case ids into the same set.
    def extract_form_info(form, properties=None, case_ids=case_ids):
        unknown_number = 0
        meta = form['form'].get('meta', dict())
        # get case ids
        case_blocks = extract_case_blocks(form)
        cases = {c['@case_id'] for c in case_blocks}
        case_ids |= cases
        form_info = {
            'form': form,
            'attachments': list(),
            'name': form['form'].get('@name', 'unknown form'),
            'user': meta.get('username', 'unknown_user'),
            'cases': cases,
            'id': form['_id']
        }
        for k, v in form['_attachments'].iteritems():
            # Skip the form XML itself; we only want multimedia attachments.
            if v['content_type'] == 'text/xml':
                continue
            try:
                question_id = unicode(u'-'.join(find_question_id(form['form'], k)))
            except TypeError:
                # find_question_id returned None — synthesize "unknownN".
                question_id = unicode(u'unknown' + unicode(unknown_number))
                unknown_number += 1
            if not properties or question_id in properties:
                extension = unicode(os.path.splitext(k)[1])
                form_info['attachments'].append({
                    'size': v['length'],
                    'name': k,
                    'question_id': question_id,
                    'extension': extension,
                    'timestamp': parse(form['received_on']).timetuple(),
                })
        return form_info

    key = [domain, app_id, xmlns]
    form_ids = {f['id'] for f in XFormInstance.get_db().view("attachments/attachments",
                                                             start_key=key + [startdate],
                                                             end_key=key + [enddate, {}],
                                                             reduce=False)}
    properties = set()
    if export_id:
        schema = FormExportSchema.get(export_id)
        for table in schema.tables:
            # - in question id is replaced by . in excel exports
            properties |= {c.display.replace('.', '-') for c in table.columns}

    if not app_id:
        zip_name = 'Unrelated Form'
    forms_info = list()
    for form in iter_docs(XFormInstance.get_db(), form_ids):
        # Fall back to the first form's name when no zip name was given.
        if not zip_name:
            zip_name = unidecode(form['form'].get('@name', 'unknown form'))
        forms_info.append(extract_form_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    # get case names
    case_id_to_name = {c: c for c in case_ids}
    for case in iter_docs(CommCareCase.get_db(), case_ids):
        if case['name']:
            case_id_to_name[case['_id']] = case['name']

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache filename derived from the query parameters.
        params = '_'.join(map(str, [xmlns, startdate, enddate, export_id, num_forms]))
        fname = '{}-{}'.format(app_id, hashlib.md5(params).hexdigest())
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, fname)
    else:
        # NOTE(review): mkstemp() returns an *open* fd that is discarded
        # without being closed — leaks a file descriptor.
        _, fpath = tempfile.mkstemp()

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        with open(fpath, 'wb') as zfile:
            with zipfile.ZipFile(zfile, 'w') as z:
                for form_number, form_info in enumerate(forms_info):
                    f = XFormInstance.wrap(form_info['form'])
                    # Map case ids to case names for use in attachment filenames.
                    form_info['cases'] = {case_id_to_name[case_id]
                                          for case_id in form_info['cases']}
                    for a in form_info['attachments']:
                        fname = filename(form_info, a['question_id'], a['extension'])
                        zi = zipfile.ZipInfo(fname, a['timestamp'])
                        z.writestr(zi,
                                   f.fetch_attachment(a['name'], stream=True).read(),
                                   zipfile.ZIP_STORED)
                    DownloadBase.set_progress(build_form_multimedia_zip,
                                              form_number + 1, num_forms)

    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        # NOTE(review): the zip is opened without 'rb' here (unlike the app-zip
        # builders) — binary content read in text mode; confirm intended.
        expose_cached_download(
            FileWrapper(open(fpath)),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs
        )

    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    """Build an application zip (CCZ) and register it as a download.

    The archive is written to the shared transfer drive when
    ``settings.SHARED_DRIVE_CONF.transfer_enabled`` is set (and reused if it
    already exists there), otherwise to a fresh temp file, then exposed under
    ``download_id``.  Progress is reported on a 0-100 scale through
    ``DownloadBase.set_progress``.

    :returns: dict with key "errors" listing messages from ``iter_app_files``
        (empty when the cached archive path was reused).
    """
    # Imported here, not at module level — presumably to avoid an import
    # cycle with the views module; TODO confirm.
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    initial_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache path on the shared drive, keyed on app id,
        # content flags, app version and build profile.
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, "{}{}{}{}{}".format(
            app._id,
            'mm' if include_multimedia_files else '',
            'ccz' if include_index_files else '',
            app.version,
            build_profile_id
        ))
        if download_targeted_version:
            fpath += '-targeted'
    else:
        # NOTE(review): mkstemp() returns an *open* OS-level file descriptor
        # as its first element; it is discarded here without being closed,
        # which leaks a file descriptor.
        _, fpath = tempfile.mkstemp()

    DownloadBase.set_progress(build_application_zip, initial_progress, 100)

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors, file_count = iter_app_files(
            app, include_multimedia_files, include_index_files, build_profile_id,
            download_targeted_version=download_targeted_version,
        )
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                progress = initial_progress
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
                    progress += file_progress / file_count
                    DownloadBase.set_progress(build_application_zip, progress, 100)
    else:
        # Cached archive reused: skip straight past the file-building phase.
        DownloadBase.set_progress(build_application_zip, initial_progress + file_progress, 100)

    common_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': (1 * 60 * 60),
    }
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs
        )
    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }