def post(self, request, *args, **kwargs):
    """Handle a bulk product upload.

    Validates that a file was uploaded and that it is named ``*.xlsx``,
    caches the bytes in soil, queues the async product import, and
    redirects to the import status page.
    """
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    elif not upload.name.endswith('.xlsx'):
        messages.error(request, _('please use xlsx format only'))
        return self.get(request, *args, **kwargs)
    # NOTE(review): assumes the URLconf always supplies the domain as the
    # first positional arg; an empty ``args`` would raise IndexError — confirm.
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    file_ref = expose_cached_download(
        upload.read(),
        expiry=1*60*60,  # cached for one hour
        file_extension=file_extention_from_filename(upload.name)
    )
    task = import_products_async.delay(
        domain,
        file_ref.download_id,
    )
    # link the task to the download so the status page can poll progress
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            ProductImportStatusView.urlname,
            args=[domain, file_ref.download_id]
        )
    )
def post(self, request, *args, **kwargs):
    """Handle a bulk locations upload.

    Rejects a missing file, a missing domain arg, or a non-xlsx content
    type (re-rendering the GET view with an error each time), then caches
    the workbook for ten hours and schedules the async import.
    """
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    # Browser-reported MIME type for .xlsx; spoofable, so this is only a
    # first-line sanity check, not real validation.
    if upload.content_type != 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet':
        messages.error(request, _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    TEN_HOURS = 10 * 60 * 60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=TEN_HOURS,
        file_extension=file_extention_from_filename(upload.name),
    )
    # We need to start this task after this current request finishes because this
    # request uses the lock_locations decorator which acquires the same lock that
    # the task will try to acquire.
    task = import_locations_async.apply_async(args=[domain, file_ref.download_id], countdown=10)
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, TEN_HOURS)
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            LocationImportStatusView.urlname,
            args=[domain, file_ref.download_id]
        )
    )
def post(self, request, *args, **kwargs):
    """Handle a bulk locations upload.

    Validates the upload (file present, domain present, xlsx content
    type), caches the bytes for one hour, queues the async location
    import, and redirects to the status page.
    """
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    # Browser-supplied MIME type for .xlsx files; a quick sanity check only.
    if upload.content_type != 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet':
        messages.error(
            request,
            _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    ONE_HOUR = 1 * 60 * 60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=ONE_HOUR,
        file_extension=file_extention_from_filename(upload.name),
    )
    task = import_locations_async.delay(
        domain,
        file_ref.download_id,
    )
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, ONE_HOUR)
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id]))
def post(self, request):
    """Handle a fixture (lookup table) upload.

    Caches the uploaded workbook for one hour, runs cheap synchronous
    format validation, then queues the async upload task and redirects
    to the status page.  ``replace`` drops existing tables first.
    """
    replace = 'replace' in request.POST
    file_ref = expose_cached_download(
        request.file.read(),
        file_extension=file_extention_from_filename(request.file.name),
        expiry=1 * 60 * 60,
    )
    # catch basic validation in the synchronous UI
    try:
        validate_file_format(file_ref.get_filename())
    except (FixtureUploadError, JSONReaderError, HeaderValueError) as e:
        messages.error(request, _(u'Upload unsuccessful: %s') % e)
        return HttpResponseRedirect(fixtures_home(self.domain))
    # hand off to async
    task = fixture_upload_async.delay(
        self.domain,
        file_ref.download_id,
        replace,
    )
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(FixtureUploadStatusView.urlname,
                args=[self.domain, file_ref.download_id]))
def process_upload(self):
    """Persist an uploaded multimedia zip and queue the async bulk-upload task.

    When the upload already lives on disk and a shared temp dir is
    configured, the file is moved onto the shared drive (NFS path);
    otherwise the bytes are cached via soil.  Returns the status-cache
    response payload for the client to poll.
    """
    if hasattr(self.uploaded_file, 'temporary_file_path') and settings.SHARED_DRIVE_CONF.temp_dir:
        # Disk-backed upload: avoid reading the whole zip into memory.
        processing_id = uuid.uuid4().hex
        path = settings.SHARED_DRIVE_CONF.get_temp_file(suffix='.upload')
        shutil.move(self.uploaded_file.temporary_file_path(), path)
        status = BulkMultimediaStatusCacheNfs(processing_id, path)
        status.save()
    else:
        # In-memory upload: rewind before reading, then cache the bytes.
        self.uploaded_file.file.seek(0)
        saved_file = expose_cached_download(
            self.uploaded_file.file.read(),
            expiry=BulkMultimediaStatusCache.cache_expiry,
            file_extension=file_extention_from_filename(self.uploaded_file.name),
        )
        processing_id = saved_file.download_id
        status = BulkMultimediaStatusCache(processing_id)
        status.save()
    process_bulk_upload_zip.delay(processing_id, self.domain, self.app_id,
                                  username=self.username,
                                  share_media=self.share_media,
                                  license_name=self.license_used,
                                  author=self.author,
                                  attribution_notes=self.attribution_notes)
    return status.get_response()
def post(self, request):
    """Handle a fixture (lookup table) upload.

    Stashes the file in the one-hour download cache, performs quick
    synchronous format validation, then hands off to the async fixture
    upload task and redirects to its status page.
    """
    replace = 'replace' in request.POST
    file_ref = expose_cached_download(
        request.file.read(),
        file_extension=file_extention_from_filename(request.file.name),
        expiry=1*60*60,
    )
    # catch basic validation in the synchronous UI
    try:
        validate_file_format(file_ref.get_filename())
    except (FixtureUploadError, JSONReaderError, HeaderValueError) as e:
        messages.error(request, _(u'Upload unsuccessful: %s') % e)
        return HttpResponseRedirect(fixtures_home(self.domain))
    # hand off to async
    task = fixture_upload_async.delay(
        self.domain,
        file_ref.download_id,
        replace,
    )
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            FixtureUploadStatusView.urlname,
            args=[self.domain, file_ref.download_id]
        )
    )
def send_monthly_sms_report():
    """Build last month's ICDS SMS usage report and email a download link.

    Computes the previous calendar month's date range, runs the
    ``get_icds_sms_usage`` management command, exposes the resulting
    file as a 24-hour cached download, and emails the link to a fixed
    recipient list.  On any failure an error notice is emailed instead
    and the exception is re-raised.
    """
    subject = _('Monthly SMS report')
    recipients = ['*****@*****.**', '*****@*****.**',
                  '*****@*****.**', '*****@*****.**',
                  '*****@*****.**', '*****@*****.**']
    try:
        # First day of the previous calendar month.
        start_date = date.today().replace(day=1) - relativedelta(months=1)
        # monthrange() returns (weekday-of-first-day, days-in-month);
        # only the day count is needed here.
        _weekday, last_day = calendar.monthrange(start_date.year, start_date.month)
        end_date = start_date.replace(day=last_day)
        filename = call_command('get_icds_sms_usage', 'icds-cas', str(start_date), str(end_date))
        with open(filename, 'rb') as f:
            cached_download = expose_cached_download(
                f.read(),
                expiry=24 * 60 * 60,  # link is valid for one day
                file_extension=file_extention_from_filename(filename),
                mimetype=Format.from_format(Format.XLS_2007).mimetype,
                content_disposition='attachment; filename="%s"' % filename)
        path = reverse('retrieve_download', kwargs={'download_id': cached_download.download_id})
        link = f"{web.get_url_base()}{path}?get_file"
        message = _("""
        Hi,
        Please download the sms report for last month at {link}.
        The report is available only till midnight today.
        """).format(link=link)
        send_html_email_async.delay(subject, recipients, message,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
    except Exception:
        message = _("""
        Hi,
        Could not generate the montly SMS report for ICDS. The error has been
        notified. Please report as an issue for quick followup
        """)
        send_html_email_async.delay(subject, recipients, message,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
        # Bare ``raise`` re-raises the active exception with its original
        # traceback intact (``raise e`` re-raises from this frame).
        raise
def get_tempfile_ref_for_contents(self, identifier):
    """Materialize the stored contents for *identifier* as a temp file.

    Returns the temp-file path, or ``None`` when the identifier cannot
    be resolved to a (filename, content) pair.
    """
    try:
        name, body = self._get_filename_content(identifier)
    except (TypeError, ValueError):
        return None
    return make_temp_file(body, file_extention_from_filename(name))
def process_upload(self):
    """Save an uploaded bulk-multimedia zip and kick off async processing.

    Disk-backed uploads are moved to the configured shared drive; in-memory
    uploads are cached through soil.  Either way an async task is queued
    and the status-cache response is returned for polling.
    """
    if hasattr(self.uploaded_file, 'temporary_file_path') and settings.SHARED_DRIVE_CONF.temp_dir:
        # Upload is already a file on disk — move it, don't copy through memory.
        processing_id = uuid.uuid4().hex
        path = settings.SHARED_DRIVE_CONF.get_temp_file(suffix='.upload')
        shutil.move(self.uploaded_file.temporary_file_path(), path)
        status = BulkMultimediaStatusCacheNfs(processing_id, path)
        status.save()
    else:
        # Rewind the in-memory file before reading its bytes into the cache.
        self.uploaded_file.file.seek(0)
        saved_file = expose_cached_download(
            self.uploaded_file.file.read(),
            expiry=BulkMultimediaStatusCache.cache_expiry,
            file_extension=file_extention_from_filename(self.uploaded_file.name),
        )
        processing_id = saved_file.download_id
        status = BulkMultimediaStatusCache(processing_id)
        status.save()
    process_bulk_upload_zip.delay(processing_id, self.domain, self.app_id,
                                  username=self.username,
                                  share_media=self.share_media,
                                  license_name=self.license_used,
                                  author=self.author,
                                  attribution_notes=self.attribution_notes)
    return status.get_response()
def post(self, request, *args, **kwargs):
    """Handle a bulk product upload and queue the async import.

    The uploaded file must exist and have an ``.xlsx`` name; otherwise
    the GET view is re-rendered with an error message.
    """
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    elif not upload.name.endswith('.xlsx'):
        messages.error(request, _('please use xlsx format only'))
        return self.get(request, *args, **kwargs)
    # NOTE(review): no guard for empty ``args`` — assumes the URLconf
    # always passes the domain positionally; confirm.
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    file_ref = expose_cached_download(
        upload.read(),
        expiry=1*60*60,  # one hour
        file_extension=file_extention_from_filename(upload.name)
    )
    task = import_products_async.delay(
        domain,
        file_ref.download_id,
    )
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            ProductImportStatusView.urlname,
            args=[domain, file_ref.download_id]
        )
    )
def post(self, request, *args, **kwargs):
    """Handle a bulk locations upload and queue the async import.

    Validates file presence, domain presence, and xlsx content type;
    caches the workbook for one hour and records the download id so
    other code can look up the in-flight import.
    """
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    # Browser-reported MIME type for xlsx; a shallow sanity check only.
    if upload.content_type != 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet':
        messages.error(request, _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    ONE_HOUR = 1*60*60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=ONE_HOUR,
        file_extension=file_extention_from_filename(upload.name),
    )
    task = import_locations_async.delay(
        domain,
        file_ref.download_id,
    )
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, ONE_HOUR)
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            LocationImportStatusView.urlname,
            args=[domain, file_ref.download_id]
        )
    )
def get_tempfile_ref_for_contents(self, identifier):
    """Write the contents stored under *identifier* to a temp file.

    Returns the path of the new temp file, or ``None`` if the lookup
    fails to produce a usable (filename, content) pair.
    """
    try:
        resolved = self._get_filename_content(identifier)
    except (TypeError, ValueError):
        return None
    filename, content = resolved
    suffix = file_extention_from_filename(filename)
    return make_temp_file(content, suffix)
def post(self, request):
    """Handle a fixture (lookup table) upload with HTML-formatted errors.

    Caches the file for one hour, validates the workbook format
    synchronously (rendering validation errors as an HTML list), then
    queues the async upload task and redirects to the status page.
    """
    replace = 'replace' in request.POST
    file_ref = expose_cached_download(
        request.file.read(),
        file_extension=file_extention_from_filename(request.file.name),
        expiry=1*60*60,
    )
    # catch basic validation in the synchronous UI
    try:
        validate_fixture_file_format(file_ref.get_filename())
    except FixtureUploadError as e:
        messages.error(
            request,
            _('Please fix the following formatting issues in your Excel file: %s')
            % '<ul><li>{}</li></ul>'.format('</li><li>'.join(e.errors)),
            extra_tags='html'
        )
        return HttpResponseRedirect(fixtures_home(self.domain))
    # hand off to async
    task = fixture_upload_async.delay(
        self.domain,
        file_ref.download_id,
        replace,
    )
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            FixtureUploadStatusView.urlname,
            args=[self.domain, file_ref.download_id]
        )
    )
def post(self, request, *args, **kwargs):
    """Handle a bulk locations upload and queue the async import.

    Requires a file and a domain arg; no content-type validation is
    performed in this variant.
    """
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    file_ref = expose_cached_download(
        upload.read(),
        expiry=1*60*60,  # one hour
        file_extension=file_extention_from_filename(upload.name),
    )
    task = import_locations_async.delay(
        domain,
        file_ref.download_id,
    )
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            LocationImportStatusView.urlname,
            args=[domain, file_ref.download_id]
        )
    )
def post(self, request):
    """Handle a fixture (lookup table) upload with HTML-formatted errors.

    Mirrors the other fixture-upload view: cache for one hour, validate
    synchronously, queue the async task, redirect to the status page.
    """
    replace = 'replace' in request.POST
    file_ref = expose_cached_download(
        request.file.read(),
        file_extension=file_extention_from_filename(request.file.name),
        expiry=1*60*60,
    )
    # catch basic validation in the synchronous UI
    try:
        validate_fixture_file_format(file_ref.get_filename())
    except FixtureUploadError as e:
        messages.error(
            request,
            _(u'Please fix the following formatting issues in your excel file: %s')
            % '<ul><li>{}</li></ul>'.format('</li><li>'.join(e.errors)),
            extra_tags='html'
        )
        return HttpResponseRedirect(fixtures_home(self.domain))
    # hand off to async
    task = fixture_upload_async.delay(
        self.domain,
        file_ref.download_id,
        replace,
    )
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(
            FixtureUploadStatusView.urlname,
            args=[self.domain, file_ref.download_id]
        )
    )
def get_tempfile_ref_for_contents(self, identifier):
    """Materialize the contents stored under *identifier* into a temp file.

    Text content is normalized to UTF-8 bytes first.  Returns the
    temp-file path, or ``None`` when the identifier cannot be resolved.
    """
    try:
        filename, content = self._get_filename_content(identifier)
        # make_temp_file expects bytes; encode unicode text up front.
        content = content.encode('utf-8') if isinstance(content, six.text_type) else content
    except (TypeError, ValueError):
        return None
    suffix = file_extention_from_filename(filename)
    return make_temp_file(content, suffix)
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip"):
    """Build an app CCZ/zip and expose it as a downloadable artifact.

    When the shared transfer drive is enabled the zip is written to (and
    served from) a deterministic path there, skipping the rebuild if it
    already exists; otherwise it is built in a temp file and cached via
    soil for one hour.  Returns ``{"errors": [...]}`` from file iteration.
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic name keyed on app id/version/profile so repeat
        # requests reuse the already-built artifact.
        fpath = os.path.join(
            settings.SHARED_DRIVE_CONF.transfer_dir,
            "{}{}{}{}{}".format(app._id, 'mm' if include_multimedia_files else '',
                                'ccz' if include_index_files else '', app.version,
                                build_profile_id))
    else:
        _, fpath = tempfile.mkstemp()
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors = iter_app_files(app, include_multimedia_files, include_index_files, build_profile_id)
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
    common_kwargs = dict(
        mimetype='application/zip' if compress_zip else 'application/x-zip-compressed',
        content_disposition='attachment; filename="{fname}"'.format(fname=filename),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        expose_cached_download(
            # BUGFIX: the zip must be opened in binary mode; text mode
            # raises UnicodeDecodeError when the wrapper streams the bytes.
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs)
    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
def test_transient(self, file_store):
    """Round-trip a file through *file_store* and verify the temp-file
    ref keeps the extension, contents, and original filename."""
    with open(self.filename, 'rb') as f:
        identifier = file_store.write_file(f, 'test_file.txt', 'test-domain').identifier
    tmpfile = file_store.get_tempfile_ref_for_contents(identifier)
    # Temp file should inherit the uploaded file's extension.
    self.assertEqual(file_extention_from_filename(tmpfile), '.txt')
    with open(tmpfile, encoding='utf-8') as f:
        self.assertEqual(f.read(), self.content)
    self.assertEqual(file_store.get_filename(identifier), 'test_file.txt')
def test_transient(self, file_store):
    """Round-trip a file through *file_store* and verify the temp-file
    ref keeps the extension, contents, and original filename."""
    # NOTE(review): opened in text mode — assumes self.content is plain
    # text; a sibling variant uses 'rb', so confirm which this store expects.
    with open(self.filename, 'r') as f:
        identifier = file_store.write_file(f, 'test_file.txt').identifier
    tmpfile = file_store.get_tempfile_ref_for_contents(identifier)
    self.assertEqual(file_extention_from_filename(tmpfile), '.txt')
    with open(tmpfile) as f:
        self.assertEqual(f.read(), self.content)
    self.assertEqual(file_store.get_filename(identifier), 'test_file.txt')
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip"):
    """Build an app CCZ/zip and expose it for download.

    Writes to a deterministic shared-drive path (reused across requests)
    when the transfer drive is enabled, else to a temp file cached via
    soil for one hour.  Returns ``{"errors": [...]}``.
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, "{}{}{}{}{}".format(
            app._id,
            'mm' if include_multimedia_files else '',
            'ccz' if include_index_files else '',
            app.version,
            build_profile_id
        ))
    else:
        _, fpath = tempfile.mkstemp()
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors = iter_app_files(app, include_multimedia_files, include_index_files, build_profile_id)
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
    common_kwargs = dict(
        mimetype='application/zip' if compress_zip else 'application/x-zip-compressed',
        content_disposition='attachment; filename="{fname}"'.format(fname=filename),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            # BUGFIX: open the zip in binary mode — text mode would try to
            # decode the archive bytes and fail.
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs
        )
    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
def _process_bulk_upload(bulk_file, domain):
    """Import case properties from an uploaded workbook, one sheet per
    case type.

    Skips the header row and any row without a property name; returns
    the list of error messages produced by ``save_case_property``.
    """
    tmp_path = make_temp_file(bulk_file.read(), file_extention_from_filename(bulk_file.name))
    failures = []
    with open_any_workbook(tmp_path) as workbook:
        for sheet in workbook.worksheets:
            # Sheet title doubles as the case type name.
            data_rows = itertools.islice(sheet.iter_rows(), 1, None)
            for row in data_rows:
                prop_name, group, data_type, description, deprecated = (
                    cell.value for cell in row[:5])
                if not prop_name:
                    continue
                problem = save_case_property(prop_name, sheet.title, domain, data_type,
                                             description, group, deprecated)
                if problem:
                    failures.append(problem)
    return failures
def _expose_download_link(fpath, filename, compress_zip, download_id):
    """Register the zip at *fpath* for download.

    Served from the shared transfer drive when enabled, otherwise the
    bytes are exposed via the cached-download machinery with a one-hour
    expiry.
    """
    common_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': (1 * 60 * 60),
    }
    if settings.SHARED_DRIVE_CONF.transfer_enabled:
        expose_file_download(fpath, use_transfer=True, **common_kwargs)
    else:
        # Binary mode is required: the wrapper streams raw zip bytes.
        expose_cached_download(FileWrapper(open(fpath, 'rb')),
                               file_extension=file_extention_from_filename(filename),
                               **common_kwargs)
def _upload_fixture_api(request, domain):
    """API entry point for fixture (lookup table) uploads.

    Parses the request args, then either caches the workbook and queues
    the async upload task (returning a status URL and a ready-made curl
    command), or validates and processes the file synchronously.

    :returns: an ``UploadFixtureAPIResponse`` describing the outcome.
    """
    try:
        excel_file, replace, is_async = _get_fixture_upload_args_from_request(request, domain)
    except FixtureAPIRequestError as e:
        return UploadFixtureAPIResponse('fail', six.text_type(e))
    with excel_file as filename:
        if is_async:
            # BUGFIX: open in binary mode — the payload is an xlsx (zip)
            # archive; text mode would fail to decode its bytes.
            with open(filename, 'rb') as f:
                file_ref = expose_cached_download(
                    f.read(),
                    file_extension=file_extention_from_filename(filename),
                    expiry=1 * 60 * 60,
                )
            download_id = file_ref.download_id
            task = fixture_upload_async.delay(
                domain,
                download_id,
                replace,
            )
            file_ref.set_task(task)
            status_url = "{}{}".format(
                get_url_base(),
                reverse('fixture_api_status', args=(domain, download_id))
            )
            curl_command = "curl -v --digest {} -u {}".format(
                status_url, request.user.username
            )
            return UploadFixtureAPIResponse('success', {
                "download_id": download_id,
                "status_url": status_url,
                "curl_command": curl_command,
                "message": _("File uploaded successfully.")
            })
        # Synchronous path: validate the workbook format before processing.
        try:
            validate_fixture_file_format(filename)
        except FixtureUploadError as e:
            return UploadFixtureAPIResponse(
                'fail',
                _('Please fix the following formatting issues in your Excel file: %s')
                % '\n'.join(e.errors))
        result = upload_fixture_file(domain, filename, replace=replace)
        status = 'warning' if result.errors else 'success'
        return UploadFixtureAPIResponse(status, result.get_display_message())
def _process_bulk_upload(bulk_file, domain):
    """Import case properties (and optional FHIR mappings) from a workbook.

    One sheet per case type; the special FHIR mapping sheet is processed
    separately when the FHIR_INTEGRATION toggle is on.  Returns a list of
    per-row error messages.
    """
    filename = make_temp_file(bulk_file.read(), file_extention_from_filename(bulk_file.name))
    errors = []
    import_fhir_data = toggles.FHIR_INTEGRATION.enabled(domain)
    fhir_resource_type_by_case_type = {}
    # FHIR imports carry two extra columns (resource path, remove flag).
    expected_columns_in_prop_sheet = 5
    if import_fhir_data:
        expected_columns_in_prop_sheet = 7
    with open_any_workbook(filename) as workbook:
        for worksheet in workbook.worksheets:
            if worksheet.title == FHIR_RESOURCE_TYPE_MAPPING_SHEET:
                if import_fhir_data:
                    _errors, fhir_resource_type_by_case_type = _process_fhir_resource_type_mapping_sheet(
                        domain, worksheet)
                    errors.extend(_errors)
                continue
            case_type = worksheet.title
            # Skip the header row.
            for (i, row) in enumerate(
                    itertools.islice(worksheet.iter_rows(), 1, None)):
                if len(row) < expected_columns_in_prop_sheet:
                    error = _('Not enough columns')
                else:
                    error, fhir_resource_prop_path, fhir_resource_type, remove_path = None, None, None, None
                    name, group, data_type, description, deprecated = [
                        cell.value for cell in row[:5]
                    ]
                    if import_fhir_data:
                        fhir_resource_prop_path, remove_path = row[5:]
                        remove_path = remove_path == 'Y' if remove_path else False
                        fhir_resource_type = fhir_resource_type_by_case_type.get(
                            case_type)
                        if fhir_resource_prop_path and not fhir_resource_type:
                            error = _('Could not find resource type for {}'
                                      ).format(case_type)
                    if not error:
                        error = save_case_property(name, case_type, domain,
                                                   data_type, description,
                                                   group, deprecated,
                                                   fhir_resource_prop_path,
                                                   fhir_resource_type,
                                                   remove_path)
                if error:
                    errors.append(
                        _('Error in case type {}, row {}: {}').format(
                            case_type, i, error))
    return errors
def _process_bulk_upload(bulk_file, domain):
    """Import case properties from an uploaded workbook (one sheet per
    case type), collecting a per-row error message for every failure.

    Rows with fewer than five columns are rejected outright; all others
    are passed to ``save_case_property``.  Returns the error list.
    """
    tmp_path = make_temp_file(bulk_file.read(), file_extention_from_filename(bulk_file.name))
    errors = []
    with open_any_workbook(tmp_path) as workbook:
        for sheet in workbook.worksheets:
            case_type = sheet.title
            # Skip the header row, keeping the (0-based) data-row index
            # for error reporting.
            data_rows = itertools.islice(sheet.iter_rows(), 1, None)
            for i, row in enumerate(data_rows):
                if len(row) < 5:
                    error = _('Not enough columns')
                else:
                    values = [cell.value for cell in row[:5]]
                    name, group, data_type, description, deprecated = values
                    error = save_case_property(name, case_type, domain, data_type,
                                               description, group, deprecated)
                if error:
                    errors.append(_('Error in case type {}, row {}: {}').format(case_type, i, error))
    return errors
def _cache_file(request, domain, upload):
    """Stash in soil for ten hours to make it easier to pass to celery

    :returns: `LocationImportView.Ref` object that can be identified with
    `isinstance(rval, LocationImportView.Ref)` or an HTTP response
    generated by `lock_locations` (and guaranteed not to be
    `LocationImportView.Ref`) if the lock could not be acquired.
    """
    # NOTE(review): this body always returns a Ref; the HTTP-response case
    # described above presumably comes from a decorator — confirm at caller.
    TEN_HOURS = 10 * 60 * 60
    file_ref = expose_cached_download(
        upload.read(),
        expiry=TEN_HOURS,
        file_extension=file_extention_from_filename(upload.name),
    )
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, TEN_HOURS)
    return LocationImportView.Ref(file_ref)
def _expose_download(fpath, use_transfer, zip_name, download_id, num_forms):
    """Expose the built form-multimedia zip at *fpath* for download and
    mark the zip-building task as complete.

    Serves from the transfer drive when *use_transfer* is set, otherwise
    caches the bytes via soil with a one-hour expiry.
    """
    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(
            fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        expose_cached_download(
            # BUGFIX: zips must be opened in binary mode; text mode would
            # attempt to decode the archive bytes and fail.
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs)
    # Report full progress: all forms processed.
    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def _upload_fixture_api(request, domain):
    """API entry point for fixture (lookup table) uploads.

    Parses the request args, then either caches the workbook and queues
    the async upload task (returning its status URL), or validates and
    processes the file synchronously.
    """
    try:
        excel_file, replace, is_async, skip_orm, email = _get_fixture_upload_args_from_request(
            request, domain)
    except FixtureAPIRequestError as e:
        return UploadFixtureAPIResponse('fail', str(e))
    with excel_file as filename:
        if is_async:
            # Cache the raw bytes (1 hour) so the celery worker can fetch them.
            with open(filename, 'rb') as f:
                file_ref = expose_cached_download(
                    f.read(),
                    file_extension=file_extention_from_filename(filename),
                    expiry=1 * 60 * 60,
                )
            download_id = file_ref.download_id
            task = fixture_upload_async.delay(domain,
                                              download_id,
                                              replace,
                                              skip_orm,
                                              user_email=email)
            file_ref.set_task(task)
            status_url = "{}{}".format(
                get_url_base(),
                reverse('fixture_api_status', args=(domain, download_id)))
            return AsyncUploadFixtureAPIResponse(
                'success', _("File has been uploaded successfully and is queued for processing."),
                download_id, status_url)
        # Synchronous path: validate the workbook format up front.
        try:
            validate_fixture_file_format(filename)
        except FixtureUploadError as e:
            return UploadFixtureAPIResponse(
                'fail',
                _('Please fix the following formatting issues in your Excel file: %s'
                  ) % '\n'.join(e.errors))
        result = upload_fixture_file(domain, filename, replace=replace)
        status = 'warning' if result.errors else 'success'
        return UploadFixtureAPIResponse(status, result.get_display_message())
def send_custom_sms_report(start_date: str, end_date: str, email: str, domain: str):
    """Build a custom-range ICDS SMS usage report and email a download link.

    Runs the ``get_icds_sms_usage`` management command for the given
    range, exposes the file as a 24-hour cached download, and emails
    *email* the link.  On failure an error notice is sent instead and the
    exception is re-raised.  The in-progress report is always removed
    from the tracker.
    """
    subject = _('Monthly SMS report')
    recipients = [email]
    try:
        filename = call_command('get_icds_sms_usage', 'icds-cas', start_date, end_date)
        with open(filename, 'rb') as f:
            cached_download = expose_cached_download(
                f.read(),
                expiry=24 * 60 * 60,  # link is valid for one day
                file_extension=file_extention_from_filename(filename),
                mimetype=Format.from_format(Format.XLS_2007).mimetype,
                content_disposition='attachment; filename="%s"' % filename)
        path = reverse('retrieve_download', kwargs={'download_id': cached_download.download_id})
        link = f"{web.get_url_base()}{path}?get_file"
        message = _("""
        Hi,
        Please download the sms report for time frame {start_date} to {end_date} (inclusive) at {link}.
        The report is available only for next 24 hours.
        """).format(link=link, start_date=start_date, end_date=end_date)
        send_html_email_async.delay(subject, recipients, message,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
    except Exception:
        message = _("""
        Hi,
        Could not generate the custom SMS report for ICDS. The error has been
        notified. Please report as an issue for quick followup
        """)
        send_html_email_async.delay(subject, recipients, message,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
        # Bare ``raise`` preserves the original traceback (``raise e``
        # re-raises from this frame instead).
        raise
    finally:
        # Always clear the tracker so repeat requests are not blocked.
        report_tracker = CustomSMSReportTracker(domain)
        report_tracker.remove_report(start_date, end_date)
def _expose_download(fpath, use_transfer, zip_name, download_id, num_forms):
    """Expose the built form-multimedia zip at *fpath* as a download and
    mark the building task complete (progress = num_forms/num_forms).
    """
    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            # BUGFIX: open the zip in binary mode — text mode would try to
            # decode the archive bytes and raise UnicodeDecodeError.
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs
        )
    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    """Build an app CCZ/zip, optionally with a manifest and a media
    integrity check, and expose it for download.

    Progress is reported through ``DownloadBase``; on any error the
    partial file is removed, the task is marked FAILED and ``Ignore`` is
    raised so celery stops tracking it.
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    initial_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic path keyed on app id/version/profile so repeat
        # requests can reuse an already-built artifact.
        fpath = os.path.join(
            settings.SHARED_DRIVE_CONF.transfer_dir,
            "{}{}{}{}{}".format(app._id, 'mm' if include_multimedia_files else '',
                                'ccz' if include_index_files else '', app.version,
                                build_profile_id))
        if download_targeted_version:
            fpath += '-targeted'
    else:
        dummy, fpath = tempfile.mkstemp()
    DownloadBase.set_progress(build_application_zip, initial_progress, 100)
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors, file_count = iter_app_files(
            app, include_multimedia_files, include_index_files, build_profile_id,
            download_targeted_version=download_targeted_version,
        )
        if toggles.CAUTIOUS_MULTIMEDIA.enabled(app.domain):
            # Embed the build parameters in the archive for later debugging.
            manifest = json.dumps(
                {
                    'include_multimedia_files': include_multimedia_files,
                    'include_index_files': include_index_files,
                    'download_id': download_id,
                    'build_profile_id': build_profile_id,
                    'compress_zip': compress_zip,
                    'filename': filename,
                    'download_targeted_version': download_targeted_version,
                    'app': app.to_json(),
                }, indent=4)
            files = itertools.chain(files, [('manifest.json', manifest)])
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                progress = initial_progress
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
                    progress += file_progress / file_count
                    DownloadBase.set_progress(build_application_zip, progress, 100)
        # Integrity check that all media files present in media_suite.xml were added to the zip
        if include_multimedia_files and include_index_files and toggles.CAUTIOUS_MULTIMEDIA.enabled(
                app.domain):
            with open(fpath, 'rb') as tmp:
                with zipfile.ZipFile(tmp, "r") as z:
                    media_suites = [
                        f for f in z.namelist()
                        if re.search(r'\bmedia_suite.xml\b', f)
                    ]
                    if len(media_suites) != 1:
                        message = _(
                            'Could not identify media_suite.xml in CCZ')
                        errors.append(message)
                        notify_exception(None, "[ICDS-291] {}".format(message))
                    else:
                        with z.open(media_suites[0]) as media_suite:
                            from corehq.apps.app_manager.xform import parse_xml
                            parsed = parse_xml(media_suite.read())
                            # Local media resources declared by the suite.
                            resources = {
                                node.text
                                for node in parsed.findall(
                                    "media/resource/location[@authority='local']"
                                )
                            }
                            names = z.namelist()
                            # Suite paths may carry a leading "./" that zip
                            # entries do not.
                            missing = [
                                r for r in resources
                                if re.sub(r'^\.\/', '', r) not in names
                            ]
                            if missing:
                                soft_assert(notify_admins=True)(
                                    False, '[ICDS-291] Files missing from CCZ',
                                    [{
                                        'missing file count': len(missing),
                                        'app_id': app._id,
                                        'version': app.version,
                                        'build_profile_id': build_profile_id,
                                    }, {
                                        'files': missing,
                                    }])
                                errors += [
                                    _('Media file missing from CCZ: {}').format(r)
                                    for r in missing
                                ]
        if errors:
            os.remove(fpath)
            update_task_state(build_application_zip, states.FAILURE, {'errors': errors})
            raise Ignore()  # We want the task to fail hard, so ignore any future updates to it
    else:
        DownloadBase.set_progress(build_application_zip,
                                  initial_progress + file_progress, 100)
    common_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': (1 * 60 * 60),
    }
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs)
    DownloadBase.set_progress(build_application_zip, 100, 100)
def get_tempfile_ref_for_contents(self, identifier):
    """Copy the blob stored under *identifier* into a temp file and
    return the temp file's path, preserving the stored file extension."""
    suffix = file_extention_from_filename(self.get_filename(identifier))
    blob = self._db.get(identifier, bucket=self._bucket)
    return make_temp_file(blob.read(), suffix)
def _process_bulk_upload(bulk_file, domain):
    """Import case-property definitions for ``domain`` from an uploaded workbook.

    The workbook has one sheet per case type plus, optionally, per-case-type
    "valid values" sheets (title suffixed with ALLOWED_VALUES_SHEET_SUFFIX) and
    a FHIR resource-type mapping sheet. Each property row is persisted via
    ``save_case_property``.

    :param bulk_file: uploaded file-like object (``.read()`` / ``.name``)
    :param domain: domain name the properties belong to
    :return: list of translated error-message strings (empty on full success)
    """
    # Work from a temp copy on disk so open_any_workbook can open it by path.
    filename = make_temp_file(bulk_file.read(), file_extention_from_filename(bulk_file.name))
    errors = []
    import_fhir_data = toggles.FHIR_INTEGRATION.enabled(domain)
    fhir_resource_type_by_case_type = {}
    expected_columns_in_prop_sheet = 5
    # Map data-type display labels back to stored values.
    data_type_map = {t.label: t.value for t in CaseProperty.DataType}
    if import_fhir_data:
        # Two extra FHIR columns: resource property path and "remove path" flag.
        expected_columns_in_prop_sheet = 7
    worksheets = []
    # case_type -> prop_name -> {allowed_value: description}
    allowed_value_info = {}
    # case_type -> prop_name -> [row numbers], kept for error reporting
    prop_row_info = {}
    seen_props = defaultdict(set)
    missing_valid_values = set()
    # First pass: split valid-values sheets from property sheets.
    with open_any_workbook(filename) as workbook:
        for worksheet in workbook.worksheets:
            if worksheet.title.endswith(ALLOWED_VALUES_SHEET_SUFFIX):
                case_type = worksheet.title[:-len(ALLOWED_VALUES_SHEET_SUFFIX)]
                allowed_value_info[case_type] = defaultdict(dict)
                prop_row_info[case_type] = defaultdict(list)
                # Skip the header row; report row numbers 1-based including header.
                for (i, row) in enumerate(itertools.islice(worksheet.iter_rows(), 1, None), start=2):
                    row_len = len(row)
                    if row_len < 1:
                        # simply ignore any fully blank rows
                        continue
                    if row_len < 3:
                        # if missing value or description, fill in "blank"
                        row += [Cell(value='') for _ in range(3 - row_len)]
                    row = [cell.value if cell.value is not None else '' for cell in row]
                    prop_name, allowed_value, description = [str(val) for val in row[0:3]]
                    if allowed_value and not prop_name:
                        msg_format = _('Error in valid values for case type {}, row {}: missing case property')
                        msg_val = msg_format.format(case_type, i)
                        errors.append(msg_val)
                    else:
                        allowed_value_info[case_type][prop_name][allowed_value] = description
                        prop_row_info[case_type][prop_name].append(i)
            else:
                worksheets.append(worksheet)
    # Second pass: process FHIR mapping sheet and per-case-type property sheets.
    for worksheet in worksheets:
        if worksheet.title == FHIR_RESOURCE_TYPE_MAPPING_SHEET:
            if import_fhir_data:
                _errors, fhir_resource_type_by_case_type = _process_fhir_resource_type_mapping_sheet(
                    domain, worksheet)
                errors.extend(_errors)
            # Mapping sheet is never treated as a case-type sheet.
            continue
        case_type = worksheet.title
        for (i, row) in enumerate(itertools.islice(worksheet.iter_rows(), 1, None), start=2):
            if len(row) < expected_columns_in_prop_sheet:
                error = _('Not enough columns')
            else:
                error, fhir_resource_prop_path, fhir_resource_type, remove_path = None, None, None, None
                name, group, data_type_display, description, deprecated = [cell.value for cell in row[:5]]
                # Fall back to value from file if data_type_display is not found in the map.
                # This allows existing error path to report accurately the value that isn't found,
                # and also has a side-effect of allowing older files (pre change to export
                # display values) to import successfully.
                data_type = data_type_map.get(data_type_display, data_type_display)
                seen_props[case_type].add(name)
                if import_fhir_data:
                    # NOTE(review): unlike the first five columns, row[5:] is not
                    # unpacked via cell.value here — verify these are raw values,
                    # not Cell objects, before the 'Y' comparison below.
                    fhir_resource_prop_path, remove_path = row[5:]
                    remove_path = remove_path == 'Y' if remove_path else False
                    fhir_resource_type = fhir_resource_type_by_case_type.get(case_type)
                    if fhir_resource_prop_path and not fhir_resource_type:
                        error = _('Could not find resource type for {}').format(case_type)
                if not error:
                    if case_type in allowed_value_info:
                        allowed_values = allowed_value_info[case_type][name]
                    else:
                        # No valid-values sheet for this case type; flag once at the end.
                        allowed_values = None
                        missing_valid_values.add(case_type)
                    error = save_case_property(name, case_type, domain, data_type, description,
                                               group, deprecated, fhir_resource_prop_path,
                                               fhir_resource_type, remove_path, allowed_values)
            if error:
                errors.append(_('Error in case type {}, row {}: {}').format(case_type, i, error))
    for case_type in missing_valid_values:
        errors.append(_('Missing valid values sheet for case type {}').format(case_type))
    # Valid values listed for properties that never appeared in a property sheet.
    for case_type in allowed_value_info:
        for prop_name in allowed_value_info[case_type]:
            if prop_name not in seen_props[case_type]:
                msg_format = _(
                    'Error in valid values for case type {}, nonexistent property listed ({}), row(s): {}')
                msg_val = msg_format.format(
                    case_type, prop_name,
                    ', '.join(str(v) for v in prop_row_info[case_type][prop_name]))
                errors.append(msg_val)
    return errors
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id, export_id, zip_name, download_id):
    """Bundle multimedia attachments of matching forms into a zip for download.

    Looks up forms for (domain, app_id, xmlns) in the given date range, pulls
    their non-XML attachments, writes them into a zip (cached on the shared
    transfer drive when enabled), and exposes the file for download while
    reporting progress via DownloadBase.

    :param domain: domain whose forms are exported
    :param xmlns: form xmlns to match
    :param startdate: range start key component — assumed same format as the
        view's date keys (TODO confirm with callers)
    :param enddate: range end key component
    :param app_id: application id; when falsy the zip is named "Unrelated Form"
    :param export_id: optional FormExportSchema id used to restrict attachments
        to exported question properties
    :param zip_name: base name for the downloaded zip; derived from the first
        form's name when falsy
    :param download_id: soil download identifier
    """
    def find_question_id(form, value):
        # Depth-first search of the form JSON for the path whose leaf equals
        # the attachment name; returns the path as a list of keys, or None.
        for k, v in form.iteritems():
            if isinstance(v, dict):
                ret = find_question_id(v, value)
                if ret:
                    return [k] + ret
            else:
                if v == value:
                    return [k]
        return None

    def filename(form_info, question_id, extension):
        # "<cases->name-question-user-id.ext" — case names prefixed when present.
        fname = u"%s-%s-%s-%s%s"
        if form_info["cases"]:
            fname = u"-".join(form_info["cases"]) + u"-" + fname
        return fname % (form_info["name"],
                        unidecode(question_id),
                        form_info["user"],
                        form_info["id"],
                        extension)

    case_ids = set()

    def extract_form_info(form, properties=None, case_ids=case_ids):
        # Collect metadata and the non-XML attachments of one form document,
        # accumulating its case ids into the shared `case_ids` set.
        unknown_number = 0
        meta = form["form"].get("meta", dict())
        # get case ids
        case_blocks = extract_case_blocks(form)
        cases = {c["@case_id"] for c in case_blocks}
        case_ids |= cases
        form_info = {
            "form": form,
            "attachments": list(),
            "name": form["form"].get("@name", "unknown form"),
            "user": meta.get("username", "unknown_user"),
            "cases": cases,
            "id": form["_id"],
        }
        for k, v in form["_attachments"].iteritems():
            if v["content_type"] == "text/xml":
                continue  # the form XML itself is not multimedia
            try:
                question_id = unicode(u"-".join(find_question_id(form["form"], k)))
            except TypeError:
                # find_question_id returned None — attachment not referenced
                # by any question; label it unknown<N>.
                question_id = unicode(u"unknown" + unicode(unknown_number))
                unknown_number += 1
            if not properties or question_id in properties:
                extension = unicode(os.path.splitext(k)[1])
                form_info["attachments"].append({
                    "size": v["length"],
                    "name": k,
                    "question_id": question_id,
                    "extension": extension,
                    "timestamp": parse(form["received_on"]).timetuple(),
                })
        return form_info

    key = [domain, app_id, xmlns]
    form_ids = {
        f["id"]
        for f in XFormInstance.get_db().view(
            "attachments/attachments",
            start_key=key + [startdate],
            end_key=key + [enddate, {}],
            reduce=False
        )
    }

    properties = set()
    if export_id:
        schema = FormExportSchema.get(export_id)
        for table in schema.tables:
            # - in question id is replaced by . in excel exports
            properties |= {c.display.replace(".", "-") for c in table.columns}

    if not app_id:
        zip_name = "Unrelated Form"
    forms_info = list()
    for form in iter_docs(XFormInstance.get_db(), form_ids):
        if not zip_name:
            zip_name = unidecode(form["form"].get("@name", "unknown form"))
        forms_info.append(extract_form_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    # get case names
    case_id_to_name = {c: c for c in case_ids}
    for case in iter_docs(CommCareCase.get_db(), case_ids):
        if case["name"]:
            case_id_to_name[case["_id"]] = case["name"]

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache path so identical requests reuse the built zip.
        params = "_".join(map(str, [xmlns, startdate, enddate, export_id, num_forms]))
        fname = "{}-{}".format(app_id, hashlib.md5(params).hexdigest())
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, fname)
    else:
        # Close the OS-level fd returned by mkstemp so it is not leaked;
        # the path is reopened below.
        tmp_fd, fpath = tempfile.mkstemp()
        os.close(tmp_fd)

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        with open(fpath, "wb") as zfile:
            with zipfile.ZipFile(zfile, "w") as z:
                for form_number, form_info in enumerate(forms_info):
                    f = XFormInstance.wrap(form_info["form"])
                    form_info["cases"] = {case_id_to_name[case_id] for case_id in form_info["cases"]}
                    for a in form_info["attachments"]:
                        fname = filename(form_info, a["question_id"], a["extension"])
                        zi = zipfile.ZipInfo(fname, a["timestamp"])
                        z.writestr(zi, f.fetch_attachment(a["name"], stream=True).read(),
                                   zipfile.ZIP_STORED)
                    DownloadBase.set_progress(build_form_multimedia_zip, form_number + 1, num_forms)

    common_kwargs = dict(
        mimetype="application/zip",
        content_disposition='attachment; filename="{fname}.zip"'.format(fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        # Open in binary mode: the zip was written with "wb" and text-mode
        # reads would corrupt the bytes on newline-translating platforms.
        expose_cached_download(
            FileWrapper(open(fpath, "rb")),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs
        )

    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id, export_id, zip_name, download_id):
    """Bundle multimedia attachments of matching forms into a zip for download.

    Looks up forms for (domain, app_id, xmlns) in the given date range, pulls
    their non-XML attachments, writes them into a zip (cached on the shared
    transfer drive when enabled), and exposes the file for download while
    reporting progress via DownloadBase.

    :param domain: domain whose forms are exported
    :param xmlns: form xmlns to match
    :param startdate: range start key component — assumed same format as the
        view's date keys (TODO confirm with callers)
    :param enddate: range end key component
    :param app_id: application id; when falsy the zip is named 'Unrelated Form'
    :param export_id: optional FormExportSchema id used to restrict attachments
        to exported question properties
    :param zip_name: base name for the downloaded zip; derived from the first
        form's name when falsy
    :param download_id: soil download identifier
    """
    def find_question_id(form, value):
        # Depth-first search of the form JSON for the path whose leaf equals
        # the attachment name; returns the path as a list of keys, or None.
        for k, v in form.iteritems():
            if isinstance(v, dict):
                ret = find_question_id(v, value)
                if ret:
                    return [k] + ret
            else:
                if v == value:
                    return [k]
        return None

    def filename(form_info, question_id, extension):
        # "<cases->name-question-user-id.ext" — case names prefixed when present.
        fname = u"%s-%s-%s-%s%s"
        if form_info['cases']:
            fname = u'-'.join(form_info['cases']) + u'-' + fname
        return fname % (form_info['name'],
                        unidecode(question_id),
                        form_info['user'],
                        form_info['id'],
                        extension)

    case_ids = set()

    def extract_form_info(form, properties=None, case_ids=case_ids):
        # Collect metadata and the non-XML attachments of one form document,
        # accumulating its case ids into the shared `case_ids` set.
        unknown_number = 0
        meta = form['form'].get('meta', dict())
        # get case ids
        case_blocks = extract_case_blocks(form)
        cases = {c['@case_id'] for c in case_blocks}
        case_ids |= cases
        form_info = {
            'form': form,
            'attachments': list(),
            'name': form['form'].get('@name', 'unknown form'),
            'user': meta.get('username', 'unknown_user'),
            'cases': cases,
            'id': form['_id']
        }
        for k, v in form['_attachments'].iteritems():
            if v['content_type'] == 'text/xml':
                continue  # the form XML itself is not multimedia
            try:
                question_id = unicode(u'-'.join(find_question_id(form['form'], k)))
            except TypeError:
                # find_question_id returned None — attachment not referenced
                # by any question; label it unknown<N>.
                question_id = unicode(u'unknown' + unicode(unknown_number))
                unknown_number += 1
            if not properties or question_id in properties:
                extension = unicode(os.path.splitext(k)[1])
                form_info['attachments'].append({
                    'size': v['length'],
                    'name': k,
                    'question_id': question_id,
                    'extension': extension,
                    'timestamp': parse(form['received_on']).timetuple(),
                })
        return form_info

    key = [domain, app_id, xmlns]
    form_ids = {f['id'] for f in XFormInstance.get_db().view("attachments/attachments",
                                                             start_key=key + [startdate],
                                                             end_key=key + [enddate, {}],
                                                             reduce=False)}

    properties = set()
    if export_id:
        schema = FormExportSchema.get(export_id)
        for table in schema.tables:
            # - in question id is replaced by . in excel exports
            properties |= {c.display.replace('.', '-') for c in table.columns}

    if not app_id:
        zip_name = 'Unrelated Form'
    forms_info = list()
    for form in iter_docs(XFormInstance.get_db(), form_ids):
        if not zip_name:
            zip_name = unidecode(form['form'].get('@name', 'unknown form'))
        forms_info.append(extract_form_info(form, properties))

    num_forms = len(forms_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, num_forms)

    # get case names
    case_id_to_name = {c: c for c in case_ids}
    for case in iter_docs(CommCareCase.get_db(), case_ids):
        if case['name']:
            case_id_to_name[case['_id']] = case['name']

    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache path so identical requests reuse the built zip.
        params = '_'.join(map(str, [xmlns, startdate, enddate, export_id, num_forms]))
        fname = '{}-{}'.format(app_id, hashlib.md5(params).hexdigest())
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, fname)
    else:
        # Close the OS-level fd returned by mkstemp so it is not leaked;
        # the path is reopened below.
        tmp_fd, fpath = tempfile.mkstemp()
        os.close(tmp_fd)

    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        with open(fpath, 'wb') as zfile:
            with zipfile.ZipFile(zfile, 'w') as z:
                for form_number, form_info in enumerate(forms_info):
                    f = XFormInstance.wrap(form_info['form'])
                    form_info['cases'] = {case_id_to_name[case_id] for case_id in form_info['cases']}
                    for a in form_info['attachments']:
                        fname = filename(form_info, a['question_id'], a['extension'])
                        zi = zipfile.ZipInfo(fname, a['timestamp'])
                        z.writestr(zi, f.fetch_attachment(a['name'], stream=True).read(),
                                   zipfile.ZIP_STORED)
                    DownloadBase.set_progress(build_form_multimedia_zip, form_number + 1, num_forms)

    common_kwargs = dict(
        mimetype='application/zip',
        content_disposition='attachment; filename="{fname}.zip"'.format(fname=zip_name),
        download_id=download_id,
    )
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        # Open in binary mode: the zip was written with 'wb' and text-mode
        # reads would corrupt the bytes on newline-translating platforms.
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            expiry=(1 * 60 * 60),
            file_extension=file_extention_from_filename(fpath),
            **common_kwargs
        )

    DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    """Build a zip (CCZ) of the app's files and expose it for download.

    Files come from ``iter_app_files``; the result is cached on the shared
    transfer drive when enabled, and progress is reported on a 0-100 scale
    via ``DownloadBase.set_progress``.

    :param include_multimedia_files: include the app's multimedia files
    :param include_index_files: include the app's index files
    :param app: application object (provides ``_id``, ``version``)
    :param download_id: soil download identifier
    :param build_profile_id: optional build profile restricting the file set
    :param compress_zip: deflate non-multimedia entries when True
    :param filename: download filename used in Content-Disposition
    :param download_targeted_version: build the targeted version variant
    :return: dict with accumulated error messages under "errors"
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    initial_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache path keyed on app id/options/version/profile so
        # a previously built zip can be reused.
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, "{}{}{}{}{}".format(
            app._id,
            'mm' if include_multimedia_files else '',
            'ccz' if include_index_files else '',
            app.version,
            build_profile_id
        ))
        if download_targeted_version:
            fpath += '-targeted'
    else:
        _, fpath = tempfile.mkstemp()
    DownloadBase.set_progress(build_application_zip, initial_progress, 100)
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors, file_count = iter_app_files(
            app, include_multimedia_files, include_index_files, build_profile_id,
            download_targeted_version=download_targeted_version,
        )
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                progress = initial_progress
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
                    progress += file_progress / file_count
                    DownloadBase.set_progress(build_application_zip, progress, 100)
    else:
        # Cached zip reused; jump straight past the file-building phase.
        DownloadBase.set_progress(build_application_zip, initial_progress + file_progress, 100)
    common_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': (1 * 60 * 60),  # presumably seconds (1 hour) — TODO confirm in soil
    }
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs
        )
    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
def get_tempfile_ref_for_contents(self, identifier):
    """Copy the blob stored under ``identifier`` into a temp file and return its path.

    The temp file keeps the original filename's extension so downstream
    consumers can detect the format.
    """
    extension = file_extention_from_filename(self.get_filename(identifier))
    blob_bytes = get_blob_db().get(key=identifier).read()
    return make_temp_file(blob_bytes, extension)
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    """Build a zip (CCZ) of the app's files and expose it for download.

    Files come from ``iter_app_files``; the result is cached on the shared
    transfer drive when enabled, and progress is reported on a 0-100 scale
    via ``DownloadBase.set_progress``.

    :param include_multimedia_files: include the app's multimedia files
    :param include_index_files: include the app's index files
    :param app: application object (provides ``_id``, ``version``)
    :param download_id: soil download identifier
    :param build_profile_id: optional build profile restricting the file set
    :param compress_zip: deflate non-multimedia entries when True
    :param filename: download filename used in Content-Disposition
    :param download_targeted_version: build the targeted version variant
    :return: dict with accumulated error messages under "errors"
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    initial_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache path keyed on app id/options/version/profile so
        # a previously built zip can be reused.
        fpath = os.path.join(
            settings.SHARED_DRIVE_CONF.transfer_dir,
            "{}{}{}{}{}".format(app._id,
                                'mm' if include_multimedia_files else '',
                                'ccz' if include_index_files else '',
                                app.version,
                                build_profile_id))
        if download_targeted_version:
            fpath += '-targeted'
    else:
        _, fpath = tempfile.mkstemp()
    DownloadBase.set_progress(build_application_zip, initial_progress, 100)
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors, file_count = iter_app_files(
            app, include_multimedia_files, include_index_files, build_profile_id,
            download_targeted_version=download_targeted_version,
        )
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                progress = initial_progress
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
                    progress += file_progress / file_count
                    DownloadBase.set_progress(build_application_zip, progress, 100)
    else:
        # Cached zip reused; jump straight past the file-building phase.
        DownloadBase.set_progress(build_application_zip, initial_progress + file_progress, 100)
    common_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': (1 * 60 * 60),  # presumably seconds (1 hour) — TODO confirm in soil
    }
    if use_transfer:
        expose_file_download(fpath, use_transfer=use_transfer, **common_kwargs)
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs)
    DownloadBase.set_progress(build_application_zip, 100, 100)
    return {
        "errors": errors,
    }
def get_tempfile_ref_for_contents(self, identifier):
    """Materialize the stored file for ``identifier`` as a temp file, returning its path."""
    original_name = self.get_filename(identifier)
    data = self._db.get(identifier, bucket=self._bucket).read()
    # Preserve the extension so the format is detectable from the temp path.
    return make_temp_file(data, file_extention_from_filename(original_name))
def excel_config(request, domain):
    """
    Step one of three.
    This is the initial post when the user uploads the excel file

    named_columns:
        Whether or not the first row of the excel sheet contains
        header strings for the columns. This defaults to True and
        should potentially not be an option as it is always used
        due to how important it is to see column headers in the
        rest of the importer.
    """
    # Only accept POSTed uploads; anything else goes back to the import page.
    if request.method != 'POST':
        return HttpResponseRedirect(base.ImportCases.get_url(domain=domain))

    if not request.FILES:
        return render_error(request, domain, 'Please choose an Excel file to import.')

    named_columns = request.POST.get('named_columns') == "on"
    uploaded_file_handle = request.FILES['file']

    extension = os.path.splitext(uploaded_file_handle.name)[1][1:].strip().lower()

    # NOTE: We may not always be able to reference files from subsequent
    # views if your worker changes, so we have to store it elsewhere
    # using the soil framework.

    if extension not in importer_util.ExcelFile.ALLOWED_EXTENSIONS:
        return render_error(request, domain,
                            'The Excel file you chose could not be processed. '
                            'Please check that it is saved as a Microsoft '
                            'Excel 97/2000 .xls file.')

    # stash content in the default storage for subsequent views
    file_ref = expose_cached_download(
        uploaded_file_handle.read(),
        expiry=1*60*60,  # presumably seconds (1 hour) — TODO confirm in soil
        file_extension=file_extention_from_filename(uploaded_file_handle.name),
    )
    # Remember the download id so later steps can retrieve the same file.
    request.session[EXCEL_SESSION_ID] = file_ref.download_id
    try:
        spreadsheet = importer_util.get_spreadsheet(file_ref, named_columns)
    except ImporterError as e:
        return render_error(request, domain, get_importer_error_message(e))

    columns = spreadsheet.get_header_columns()
    row_count = spreadsheet.get_num_rows()

    if row_count == 0:
        return render_error(request, domain,
                            'Your spreadsheet is empty. '
                            'Please try again with a different spreadsheet.')

    case_types_from_apps = get_case_types_from_apps(domain)
    # Case types with submitted data that no app declares.
    unrecognized_case_types = [t for t in CaseAccessors(domain).get_case_types()
                               if t not in case_types_from_apps]

    if len(case_types_from_apps) == 0 and len(unrecognized_case_types) == 0:
        return render_error(
            request,
            domain,
            'No cases have been submitted to this domain and there are no '
            'applications yet. You cannot import case details from an Excel '
            'file until you have existing cases or applications.'
        )

    return render(
        request,
        "importer/excel_config.html", {
            'named_columns': named_columns,
            'columns': columns,
            'unrecognized_case_types': unrecognized_case_types,
            'case_types_from_apps': case_types_from_apps,
            'domain': domain,
            'report': {
                'name': 'Import: Configuration'
            },
            'slug': base.ImportCases.slug
        }
    )
def build_application_zip(include_multimedia_files, include_index_files, app, download_id,
                          build_profile_id=None, compress_zip=False, filename="commcare.zip",
                          download_targeted_version=False):
    """Build a zip (CCZ) of the app's files and expose it for download.

    Files come from ``iter_app_files``; the result is cached on the shared
    transfer drive when enabled, and progress is reported on a 0-100 scale
    via ``DownloadBase.set_progress``. For CAUTIOUS_MULTIMEDIA domains a
    build manifest is embedded and the finished zip is integrity-checked
    against media_suite.xml.

    :param include_multimedia_files: include the app's multimedia files
    :param include_index_files: include the app's index files
    :param app: application object (provides ``_id``, ``version``, ``domain``)
    :param download_id: soil download identifier
    :param build_profile_id: optional build profile restricting the file set
    :param compress_zip: deflate non-multimedia entries when True
    :param filename: download filename used in Content-Disposition
    :param download_targeted_version: build the targeted version variant
    :raises Exception: when the integrity check finds missing media files
    """
    from corehq.apps.hqmedia.views import iter_app_files
    DownloadBase.set_progress(build_application_zip, 0, 100)
    initial_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time
    errors = []
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    if use_transfer:
        # Deterministic cache path keyed on app id/options/version/profile so
        # a previously built zip can be reused.
        fpath = os.path.join(settings.SHARED_DRIVE_CONF.transfer_dir, "{}{}{}{}{}".format(
            app._id,
            'mm' if include_multimedia_files else '',
            'ccz' if include_index_files else '',
            app.version,
            build_profile_id
        ))
        if download_targeted_version:
            fpath += '-targeted'
    else:
        dummy, fpath = tempfile.mkstemp()
    DownloadBase.set_progress(build_application_zip, initial_progress, 100)
    if not (os.path.isfile(fpath) and use_transfer):  # Don't rebuild the file if it is already there
        files, errors, file_count = iter_app_files(
            app, include_multimedia_files, include_index_files, build_profile_id,
            download_targeted_version=download_targeted_version,
        )
        if toggles.CAUTIOUS_MULTIMEDIA.enabled(app.domain):
            # Embed the exact build inputs in the zip to aid debugging.
            manifest = json.dumps({
                'include_multimedia_files': include_multimedia_files,
                'include_index_files': include_index_files,
                'download_id': download_id,
                'build_profile_id': build_profile_id,
                'compress_zip': compress_zip,
                'filename': filename,
                'download_targeted_version': download_targeted_version,
                'app': app.to_json(),
            }, indent=4)
            files = itertools.chain(files, [('manifest.json', manifest)])
        with open(fpath, 'wb') as tmp:
            with zipfile.ZipFile(tmp, "w") as z:
                progress = initial_progress
                for path, data in files:
                    # don't compress multimedia files
                    extension = os.path.splitext(path)[1]
                    file_compression = zipfile.ZIP_STORED if extension in MULTIMEDIA_EXTENSIONS else compression
                    z.writestr(path, data, file_compression)
                    progress += file_progress / file_count
                    DownloadBase.set_progress(build_application_zip, progress, 100)
        # Integrity check that all media files present in media_suite.xml were added to the zip
        if include_multimedia_files and include_index_files and toggles.CAUTIOUS_MULTIMEDIA.enabled(app.domain):
            with open(fpath, 'rb') as tmp:
                with zipfile.ZipFile(tmp, "r") as z:
                    media_suites = [f for f in z.namelist() if re.search(r'\bmedia_suite.xml\b', f)]
                    if len(media_suites) != 1:
                        # Can't verify without exactly one media suite.
                        message = _('Could not identify media_suite.xml in CCZ')
                        errors.append(message)
                    else:
                        with z.open(media_suites[0]) as media_suite:
                            from corehq.apps.app_manager.xform import parse_xml
                            parsed = parse_xml(media_suite.read())
                            resources = {node.text for node in
                                         parsed.findall("media/resource/location[@authority='local']")}
                            names = z.namelist()
                            # Local resource paths are referenced with a leading "./".
                            missing = [r for r in resources if re.sub(r'^\.\/', '', r) not in names]
                            errors += [_('Media file missing from CCZ: {}').format(r) for r in missing]
            if errors:
                # Discard the bad zip so the cached path is not reused.
                os.remove(fpath)
                raise Exception('\t' + '\t'.join(errors))
    else:
        # Cached zip reused; jump straight past the file-building phase.
        DownloadBase.set_progress(build_application_zip, initial_progress + file_progress, 100)
    common_kwargs = {
        'mimetype': 'application/zip' if compress_zip else 'application/x-zip-compressed',
        'content_disposition': 'attachment; filename="{fname}"'.format(fname=filename),
        'download_id': download_id,
        'expiry': (1 * 60 * 60),  # presumably seconds (1 hour) — TODO confirm in soil
    }
    if use_transfer:
        expose_file_download(
            fpath,
            use_transfer=use_transfer,
            **common_kwargs
        )
    else:
        expose_cached_download(
            FileWrapper(open(fpath, 'rb')),
            file_extension=file_extention_from_filename(filename),
            **common_kwargs
        )
    DownloadBase.set_progress(build_application_zip, 100, 100)
def get_tempfile_ref_for_contents(self, identifier):
    """Write the blob stored under ``identifier`` to a temp file and return its path."""
    source_filename = self.get_filename(identifier)
    contents = get_blob_db().get(key=identifier).read()
    # Keep the original extension so the format is detectable from the path.
    return make_temp_file(contents, file_extention_from_filename(source_filename))
def excel_config(request, domain):
    """
    Step one of three.
    This is the initial post when the user uploads the excel file

    named_columns:
        Whether or not the first row of the excel sheet contains
        header strings for the columns. This defaults to True and
        should potentially not be an option as it is always used
        due to how important it is to see column headers in the
        rest of the importer.
    """
    # Only accept POSTed uploads; anything else goes back to the import page.
    if request.method != 'POST':
        return HttpResponseRedirect(base.ImportCases.get_url(domain=domain))

    if not request.FILES:
        return render_error(request, domain, 'Please choose an Excel file to import.')

    named_columns = request.POST.get('named_columns') == "on"
    uploaded_file_handle = request.FILES['file']

    extension = os.path.splitext(uploaded_file_handle.name)[1][1:].strip().lower()

    # NOTE: We may not always be able to reference files from subsequent
    # views if your worker changes, so we have to store it elsewhere
    # using the soil framework.

    if extension not in importer_util.ExcelFile.ALLOWED_EXTENSIONS:
        return render_error(request, domain,
                            'The Excel file you chose could not be processed. '
                            'Please check that it is saved as a Microsoft '
                            'Excel 97/2000 .xls file.')

    # stash content in the default storage for subsequent views
    file_ref = expose_cached_download(
        uploaded_file_handle.read(),
        expiry=1*60*60,  # presumably seconds (1 hour) — TODO confirm in soil
        file_extension=file_extention_from_filename(uploaded_file_handle.name),
    )
    # Remember the download id so later steps can retrieve the same file.
    request.session[EXCEL_SESSION_ID] = file_ref.download_id

    spreadsheet = importer_util.get_spreadsheet(file_ref, named_columns)
    if not spreadsheet:
        return _spreadsheet_expired(request, domain)

    columns = spreadsheet.get_header_columns()
    row_count = spreadsheet.get_num_rows()

    if row_count == 0:
        return render_error(request, domain,
                            'Your spreadsheet is empty. '
                            'Please try again with a different spreadsheet.')

    case_types_from_apps = []
    # load types from all modules
    for row in ApplicationBase.view(
        'app_manager/types_by_module',
        reduce=True,
        group=True,
        startkey=[domain],
        endkey=[domain, {}]
    ).all():
        if not row['key'][1] in case_types_from_apps:
            case_types_from_apps.append(row['key'][1])

    case_types_from_cases = get_case_types_for_domain(domain)
    # for this we just want cases that have data but aren't being used anymore
    # NOTE(review): filter() here suggests Python 2 — under Python 3 the
    # len() call below would fail on a filter object; verify target runtime.
    case_types_from_cases = filter(lambda x: x not in case_types_from_apps, case_types_from_cases)

    if len(case_types_from_apps) == 0 and len(case_types_from_cases) == 0:
        return render_error(
            request,
            domain,
            'No cases have been submitted to this domain and there are no '
            'applications yet. You cannot import case details from an Excel '
            'file until you have existing cases or applications.'
        )

    return render(
        request,
        "importer/excel_config.html", {
            'named_columns': named_columns,
            'columns': columns,
            'case_types_from_cases': case_types_from_cases,
            'case_types_from_apps': case_types_from_apps,
            'domain': domain,
            'report': {
                'name': 'Import: Configuration'
            },
            'slug': base.ImportCases.slug
        }
    )