def test_revert_new_exports_restore_old(self):
    """Reverting a new export should un-delete its legacy saved export."""
    legacy_export = SavedExportSchema(index=['my-domain', 'xmlns'])
    legacy_export.doc_type += DELETED_SUFFIX
    legacy_export.save()
    self.new_exports[0].legacy_saved_export_schema_id = legacy_export._id

    restored = revert_new_exports(self.new_exports)

    self.assertEqual(len(restored), 1)
    self.assertFalse(restored[0].doc_type.endswith(DELETED_SUFFIX))
    legacy_export.delete()
def response_deidentified_data(domain, new_plan_version):
    """
    De-id exports will be hidden.

    Returns an alert warning that the domain's de-identified exports will
    be removed by the selected plan, or ``None`` when there are none.
    """
    # Couch view keys are JSON-serialized [domain, ...] lists; trimming the
    # tail of the dump yields a startkey prefix, and '{' sorts after every
    # JSON string character, closing the range at endkey.
    startkey = json.dumps([domain.name, ""])[:-3]
    endkey = "%s{" % startkey
    reports = SavedExportSchema.view(
        "couchexport/saved_export_schemas",
        startkey=startkey,
        endkey=endkey,
        include_docs=True,
        reduce=False,
    )
    # Count lazily instead of len(filter(...)): avoids building a throwaway
    # list on Python 2 and keeps working on Python 3 where filter is lazy.
    num_deid_reports = sum(1 for r in reports if r.is_safe)
    if num_deid_reports > 0:
        return _fmt_alert(
            ungettext(
                "You have %(num)d De-Identified Export. Selecting this "
                "plan will remove it.",
                "You have %(num)d De-Identified Exports. Selecting this "
                "plan will remove them.",
                num_deid_reports
            ) % {
                'num': num_deid_reports,
            }
        )
def export_data_shared(export_tag, format=None, filename=None,
                       previous_export_id=None, filter=None, use_cache=True,
                       max_column_size=2000, separator='|'):
    """Shared export entry point.

    Returns an HTTPResponse carrying the export data when there is any,
    ``None`` otherwise, and a 404 when ``previous_export_id`` is unknown.
    """
    if previous_export_id:
        if not SavedExportSchema.get_db().doc_exist(previous_export_id):
            return HttpResponseNotFound(
                _('No previous export with id "{id}" found'.format(
                    id=previous_export_id)))
    if not filename:
        filename = export_tag
    export_files = DefaultExportSchema(index=export_tag).get_export_files(
        format=format,
        previous_export_id=previous_export_id,
        filter=filter,
        use_cache=use_cache,
        max_column_size=max_column_size,
        separator=separator)
    # No checkpoint means there was no data to export.
    if not (export_files and export_files.checkpoint):
        return None
    return export_response(
        export_files.file, format, filename, export_files.checkpoint)
def test_repeat_conversion(self, _):
    """Repeat tables and their column selections survive conversion."""
    old_export = SavedExportSchema.wrap(self.get_json('repeat'))
    with mock.patch(
            'corehq.apps.export.models.new.FormExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    self.assertEqual(instance.name, 'Repeat Tester')
    repeat_path = [PathNode(name='form'), PathNode(name='repeat', is_repeat=True)]
    table = instance.get_table(repeat_path)
    self.assertEqual(table.label, 'Repeat: question1')
    self.assertTrue(table.selected)

    idx, column = table.get_column(
        repeat_path + [PathNode(name='question2')], 'ExportItem', None)
    self.assertEqual(column.label, 'Question Two')
    self.assertEqual(column.selected, True)

    idx, column = table.get_column([PathNode(name='number')], 'ExportItem', None)
    self.assertEqual(column.selected, True)
def test_system_property_conversion(self, _):
    """System-property columns keep their selection state when converted."""
    old_export = SavedExportSchema.wrap(self.get_json('system_properties'))
    with mock.patch(
            'corehq.apps.export.models.new.FormExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    self.assertEqual(instance.name, 'System Properties')

    # Check for first repeat table
    table = instance.get_table(MAIN_TABLE)
    self.assertEqual(table.label, 'Forms')

    case_id_path = [
        PathNode(name='form'), PathNode(name='case'), PathNode(name='@case_id')]
    expected_paths = [
        ([PathNode(name='xmlns')], None, True),
        ([PathNode(name='form'), PathNode(name='meta'), PathNode(name='userID')],
         None, True),
        (case_id_path, None, True),
        (case_id_path, CASE_NAME_TRANSFORM, True),
    ]
    for path, transform, selected in expected_paths:
        idx, column = table.get_column(path, 'ExportItem', transform)
        self.assertEqual(column.selected, selected,
                         '{} selected is not {}'.format(path, selected))
def test_basic_conversion(self, _):
    """A basic form export converts with all top-level settings intact."""
    old_export = SavedExportSchema.wrap(self.get_json('basic'))
    with mock.patch(
            'corehq.apps.export.models.new.FormExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    # Top-level instance settings carried over from the saved export.
    self.assertEqual(instance.split_multiselects, False)
    self.assertEqual(instance.transform_dates, True)
    self.assertEqual(instance.name, 'Tester')
    self.assertEqual(instance.export_format, 'csv')
    self.assertEqual(instance.is_deidentified, False)
    self.assertEqual(instance.include_errors, False)

    main_table = instance.get_table(MAIN_TABLE)
    self.assertEqual(main_table.label, 'My Forms')

    idx, question_col = main_table.get_column(
        [PathNode(name='form'), PathNode(name='question1')], 'ExportItem', None)
    self.assertEqual(question_col.label, 'Question One')
    self.assertEqual(question_col.selected, True)
def export_data_shared(export_tag, format=None, filename=None,
                       previous_export_id=None, filter=None, use_cache=True,
                       max_column_size=2000, separator='|'):
    """Run a shared export.

    If there is data, an HTTPResponse with the appropriate payload is
    returned; if there is no data, the result is ``None``.
    """
    if previous_export_id and not SavedExportSchema.get_db().doc_exist(previous_export_id):
        msg = _('No previous export with id "{id}" found'.format(id=previous_export_id))
        return HttpResponseNotFound(msg)
    effective_filename = filename if filename else export_tag
    schema = DefaultExportSchema(index=export_tag)
    files = schema.get_export_files(
        format=format,
        previous_export_id=previous_export_id,
        filter=filter,
        use_cache=use_cache,
        max_column_size=max_column_size,
        separator=separator
    )
    if files and files.checkpoint:
        return export_response(files.file, format, effective_filename, files.checkpoint)
    return None
def _convert_case_export(self, export_file_name):
    """Load a saved case export fixture and convert it.

    Returns the ``(instance, meta)`` pair from the conversion, with the
    case schema generator mocked out.
    """
    old_export = SavedExportSchema.wrap(self.get_json(export_file_name))
    schema_patch = mock.patch(
        'corehq.apps.export.models.new.CaseExportDataSchema.generate_schema_from_builds',
        return_value=self.schema)
    with schema_patch:
        return convert_saved_export_to_export_instance(self.domain, old_export)
def setUp(self):
    """Build a demo saved export with a single SplitColumn plus its schema."""
    self.db = get_db('couchexport')
    split_column = {
        'index': 'multi',
        'display': 'Split',
        'doc_type': 'SplitColumn',
        'options': ['a', 'b', 'c', 'd'],
    }
    self.custom_export = SavedExportSchema.wrap({
        'type': 'demo',
        'default_format': Format.JSON,
        'index': json.dumps(['test_custom']),
        'tables': [{
            'index': '#',
            'display': 'Export',
            'columns': [split_column],
        }],
    })
    self.custom_export.filter_function = SerializableFunction()
    self.schema = [{
        '#export_tag': ['string'],
        'tag': 'string',
        'multi': 'string',
    }]
def show_in_navigation(cls, request, domain=None):
    """True when the domain has at least one saved export schema."""
    # JSON-dump the [domain, ""] pair and strip the tail to form a
    # startkey prefix covering all of this domain's view keys.
    prefix = json.dumps([domain, ""])[:-3]
    matches = SavedExportSchema.view(
        "couchexport/saved_export_schemas",
        startkey=prefix,
        limit=1,
        include_docs=False,
        stale='update_after',
    )
    return matches.count() > 0
def get_saved_exports(self):
    """Return the domain's saved export schemas of type ``case``."""
    # JSON-dump [domain, ""] and trim to form a startkey prefix; '{' sorts
    # after any JSON string character, closing the key range.
    startkey = json.dumps([self.domain, ""])[:-3]
    endkey = "%s{" % startkey
    exports = SavedExportSchema.view(
        "couchexport/saved_export_schemas",
        startkey=startkey,
        endkey=endkey,
        include_docs=True,
    ).all()
    # Comprehension instead of filter(lambda ...): clearer, and returns a
    # list on both Python 2 and 3 (filter is lazy on 3).
    return [export for export in exports if export.type == "case"]
def show_in_navigation(cls, domain=None, project=None, user=None):
    """Show this entry when the domain has any saved export schema."""
    key_prefix = json.dumps([domain, ""])[:-3]
    result = SavedExportSchema.view(
        "couchexport/saved_export_schemas",
        startkey=key_prefix,
        limit=1,
        include_docs=False,
        stale=settings.COUCH_STALE_QUERY,
    )
    return result.count() > 0
def commit(self, request):
    """Delete the saved export, recording its type.

    Raises ExportNotFound when the export id does not resolve.
    """
    try:
        export = SavedExportSchema.get(self.export_id)
    except ResourceNotFound:
        raise ExportNotFound()
    self.export_type = export.type
    export.delete()
    messages.success(request, _("Custom export was deleted."))
def commit(self, request):
    """Delete the saved export and flash a success message naming it."""
    try:
        export = SavedExportSchema.get(self.export_id)
    except ResourceNotFound:
        raise ExportNotFound()
    self.export_type = export.type
    export.delete()
    touch_exports(self.domain)
    # NOTE(review): the export name is interpolated into a mark_safe()
    # string without escaping — confirm names cannot contain HTML.
    messages.success(request, mark_safe(_("Export <strong>{}</strong> "
                                          "was deleted.").format(export.name)))
def show_in_navigation(cls, request, *args, **kwargs):
    """True when the request's domain has at least one saved export schema."""
    domain = kwargs.get("domain")
    startkey = json.dumps([domain, ""])[:-3]
    # Bug fix: previously this compared the LIST from .all() against 0,
    # which under Python 2 compares by type name and is always True (and
    # raises TypeError on Python 3). Count the matching rows instead,
    # matching the other show_in_navigation implementations.
    return (
        SavedExportSchema.view(
            "couchexport/saved_export_schemas",
            startkey=startkey,
            limit=1,
            include_docs=False
        ).count() > 0
    )
def post(self, request, *args, **kwargs):
    """Delete the export, then defer to the parent class's POST handling."""
    try:
        export = SavedExportSchema.get(self.export_id)
    except ResourceNotFound:
        # Export is already gone; bounce back to wherever the user came from.
        return HttpResponseRedirect(request.META['HTTP_REFERER'])
    self.export_type = export.type
    export.delete()
    messages.success(request, _("Custom export was deleted."))
    return super(DeleteCustomExportView, self).post(request, *args, **kwargs)
def _convert_export(self, mock_path, export_file_name, force=False):
    """Convert a saved export fixture with save() and the schema generator mocked.

    Returns the ``(instance, meta)`` pair from the conversion.
    """
    old_export = SavedExportSchema.wrap(self.get_json(export_file_name))
    save_patch = mock.patch.object(
        SavedExportSchema, "save", return_value="False Save")
    schema_patch = mock.patch(mock_path, return_value=self.schema)
    with save_patch, schema_patch:
        return convert_saved_export_to_export_instance(
            self.domain, old_export, force_convert_columns=force)
def test_stock_conversion(self, _):
    """Stock columns in the main table stay selected after conversion."""
    old_export = SavedExportSchema.wrap(self.get_json('stock'))
    with mock.patch(
            'corehq.apps.export.models.new.CaseExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    main_table = instance.get_table(MAIN_TABLE)
    idx, stock_column = main_table.get_column(
        [PathNode(name='stock')], 'ExportItem', None)
    self.assertTrue(stock_column.selected)
def delete_custom_export(req, domain, export_id):
    """Delete a custom export and redirect back to the export home page."""
    try:
        export = SavedExportSchema.get(export_id)
    except ResourceNotFound:
        # Already gone; send the user back where they came from.
        return HttpResponseRedirect(req.META['HTTP_REFERER'])
    export.delete()
    messages.success(req, "Custom export was deleted.")
    # The in-memory doc still carries its type after deletion.
    return _redirect_to_export_home(export.type, domain)
def handle(self, *args, **options):
    """Assign a default export type to any saved export that lacks one.

    Expects exactly one positional argument: the default type. Honors the
    ``dryrun`` option by skipping the save.
    """
    if len(args) != 1:
        raise CommandError("Syntax: ./manage.py migrate_export_types [default type]!")
    default_type = args[0]
    for export in SavedExportSchema.view("couchexport/saved_export_schemas",
                                         include_docs=True):
        if not export.type:
            # Parenthesized single-argument print behaves identically on
            # Python 2 and is required syntax on Python 3.
            print("migrating %s" % export)
            export.type = default_type
            if not options["dryrun"]:
                export.save()
    print("Done!")
def handle(self, *args, **options):
    """Backfill ``export.type`` with a default for exports missing one."""
    if len(args) != 1:
        raise CommandError(
            "Syntax: ./manage.py migrate_export_types [default type]!")
    default_type = args[0]
    for export in SavedExportSchema.view(
            "couchexport/saved_export_schemas", include_docs=True):
        if not export.type:
            # Parenthesized print works identically on Python 2 and 3.
            print("migrating %s" % export)
            export.type = default_type
            if not options['dryrun']:
                export.save()
    print("Done!")
def _convert_export(self, mock_path, export_file_name, force=False):
    """Run the saved-export conversion against a JSON fixture.

    Both SavedExportSchema.save and the schema generator named by
    ``mock_path`` are patched out for the duration of the call.
    """
    old_export = SavedExportSchema.wrap(self.get_json(export_file_name))
    with mock.patch.object(SavedExportSchema, 'save', return_value='False Save'), \
            mock.patch(mock_path, return_value=self.schema):
        converted, migration_meta = convert_saved_export_to_export_instance(
            self.domain,
            old_export,
            force_convert_columns=force,
        )
    return converted, migration_meta
def test_repeat_conversion(self):
    """Repeat group tables and their questions survive conversion."""
    old_export = SavedExportSchema.wrap(self.get_json('repeat'))
    with mock.patch(
            'corehq.apps.export.models.new.FormExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(old_export)

    self.assertEqual(instance.name, 'Repeat Tester')
    repeat_table = instance.get_table(['data', 'repeat'])
    self.assertEqual(repeat_table.display_name, 'Repeat: question1')

    question_column = repeat_table.get_column(['data', 'repeat', 'question2'])
    self.assertEqual(question_column.label, 'Question Two')
    self.assertEqual(question_column.selected, True)
def migrate_domain(domain, dryrun=False):
    """Convert every saved export in ``domain`` to a new export instance.

    Collects per-export migration metadata; an export that fails to parse
    is reported and skipped rather than aborting the run.
    """
    from couchexport.models import SavedExportSchema
    export_count = stale_get_export_count(domain)
    metas = []
    if export_count:
        for old_export in with_progress_bar(stale_get_exports_json(domain),
                                            length=export_count, prefix=domain):
            try:
                _, migration_meta = convert_saved_export_to_export_instance(
                    domain, SavedExportSchema.wrap(old_export), dryrun=dryrun)
            except Exception as e:
                # 'except ... as e' is valid on Python 2.6+ and Python 3,
                # unlike the old comma form it replaces.
                print('Failed parsing {}: {}'.format(old_export['_id'], e))
            else:
                metas.append(migration_meta)
def handle(self, *args, **options):
    """Rename each export's root table index from OLD_ROOT_INDEX to NEW_ROOT_INDEX."""
    if len(args) != 0:
        raise CommandError("This command doesn't expect arguments!")
    for export in SavedExportSchema.view("couchexport/saved_export_schemas",
                                         include_docs=True):
        # Parenthesized print keeps Python 2 output while being py3-valid.
        print("migrating %s" % export)
        assert len(export.tables) == 1, "there should only be 1 root table!"
        [table] = export.tables
        if table.index == OLD_ROOT_INDEX:
            table.index = NEW_ROOT_INDEX
            if not options["dryrun"]:
                export.save()
            print("migrated")
        else:
            print("nothing to do")
def delete_custom_export(req, domain, export_id):
    """Delete a custom export and redirect to the matching report page."""
    try:
        saved_export = SavedExportSchema.get(export_id)
    except ResourceNotFound:
        return HttpResponseRedirect(req.META['HTTP_REFERER'])
    # Renamed from 'type', which shadowed the builtin of the same name.
    export_type = saved_export.type
    saved_export.delete()
    messages.success(req, "Custom export was deleted.")
    if export_type == "form":
        return HttpResponseRedirect(export.ExcelExportReport.get_url(domain))
    else:
        return HttpResponseRedirect(export.CaseExportReport.get_url(domain))
def setUp(self):
    """Create a demo export whose single table holds one SplitColumn."""
    self.db = get_db('couchexport')
    export_doc = {
        'type': 'demo',
        'default_format': Format.JSON,
        'index': json.dumps(['test_custom']),
        'tables': [{
            'index': '#',
            'display': 'Export',
            'columns': [
                {'index': 'multi', 'display': 'Split',
                 'doc_type': 'SplitColumn', 'options': ['a', 'b', 'c', 'd']}
            ],
        }],
    }
    self.custom_export = SavedExportSchema.wrap(export_doc)
    self.custom_export.filter_function = SerializableFunction()
    self.schema = [{'#export_tag': [u'string'], 'tag': u'string', 'multi': u'string'}]
def test_transform_conversion(self, _):
    """De-identification transforms map onto the converted columns."""
    old_export = SavedExportSchema.wrap(self.get_json('deid_transforms'))
    with mock.patch(
            'corehq.apps.export.models.new.FormExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    table = instance.get_table(MAIN_TABLE)
    for question, transform in [('deid_id', DEID_ID_TRANSFORM),
                                ('deid_date', DEID_DATE_TRANSFORM)]:
        idx, column = table.get_column(
            [PathNode(name='form'), PathNode(name=question)], 'ExportItem', None)
        self.assertEqual(column.deid_transform, transform)
def test_convert_form_export_stock_basic(self, _, __):
    """Stock entry @id columns in form exports stay selected."""
    old_export = SavedExportSchema.wrap(self.get_json('stock_form_export'))
    with mock.patch(
            'corehq.apps.export.models.new.FormExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance, meta = convert_saved_export_to_export_instance(self.domain, old_export)

    table = instance.get_table(MAIN_TABLE)
    stock_path = [
        PathNode(name='form'),
        PathNode(name='transfer:questionid'),
        PathNode(name='entry'),
        PathNode(name='@id'),
    ]
    idx, column = table.get_column(stock_path, 'StockItem', None)
    self.assertTrue(column.selected)
def test_basic_conversion(self, _):
    """A simple case export converts with settings and columns intact."""
    old_export = SavedExportSchema.wrap(self.get_json('case'))
    with mock.patch(
            'corehq.apps.export.models.new.CaseExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    self.assertEqual(instance.transform_dates, True)
    self.assertEqual(instance.name, 'Case Example')
    self.assertEqual(instance.export_format, 'csv')
    self.assertEqual(instance.is_deidentified, False)

    main_table = instance.get_table(MAIN_TABLE)
    self.assertEqual(main_table.label, 'Cases')
    self.assertTrue(main_table.selected)

    idx, dob_column = main_table.get_column(
        [PathNode(name='DOB')], 'ExportItem', None)
    self.assertEqual(dob_column.label, 'DOB Saved')
    self.assertEqual(dob_column.selected, True)
def test_case_history_conversion(self, _):
    """Case-history columns keep their selection state after conversion."""
    old_export = SavedExportSchema.wrap(self.get_json('case_history'))
    with mock.patch(
            'corehq.apps.export.models.new.CaseExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    table = instance.get_table(CASE_HISTORY_TABLE)
    self.assertEqual(table.label, 'Case History')

    actions = PathNode(name='actions', is_repeat=True)
    expected_paths = [
        ([actions, PathNode(name='action_type')], True),
        ([PathNode(name='number')], True),
        ([actions, PathNode(name='server_date')], True),
        ([actions, PathNode(name='xform_name')], True),
    ]
    for path, selected in expected_paths:
        idx, column = table.get_column(path, 'ExportItem', None)
        self.assertEqual(column.selected, selected,
                         '{} selected is not {}'.format(path, selected))
def migrate_domain(domain, dryrun=False, force_convert_columns=False):
    """Convert each of the domain's saved exports into export instances.

    Aborts on the first export that fails to convert (after printing it),
    collecting migration metadata for the ones that succeed.
    """
    from couchexport.models import SavedExportSchema
    export_count = stale_get_export_count(domain)
    metas = []
    if export_count:
        for old_export in with_progress_bar(
                stale_get_exports_json(domain),
                length=export_count,
                prefix=domain):
            try:
                _, migration_meta = convert_saved_export_to_export_instance(
                    domain,
                    SavedExportSchema.wrap(old_export),
                    dryrun=dryrun,
                    force_convert_columns=force_convert_columns,
                )
            except Exception as e:
                # 'as e' is valid on Python 2.6+/3 (the comma form is not);
                # bare 'raise' preserves the original traceback, which
                # 'raise e' truncated.
                print('Failed parsing {}: {}'.format(old_export['_id'], e))
                raise
            else:
                metas.append(migration_meta)
def response_deidentified_data(self):
    """
    De-id exports will be hidden
    """
    prefix = json.dumps([self.domain.name, ""])[:-3]
    # NOTE(review): this counts every saved export in the key range rather
    # than filtering to de-identified ones — confirm that is intended.
    num_deid_reports = SavedExportSchema.view(
        "couchexport/saved_export_schemas",
        startkey=prefix,
        endkey="%s{" % prefix,
        include_docs=False,
    ).count()
    if not num_deid_reports:
        return None
    return self._fmt_alert(
        ungettext(
            "You have %(num)d De-Identified Export. Selecting this plan will remove it.",
            "You have %(num)d De-Identified Exports. Selecting this plan will remove them.",
            num_deid_reports
        ) % {'num': num_deid_reports}
    )
def response_deidentified_data(self):
    """Warn about de-identified exports that the selected plan would hide."""
    startkey = json.dumps([self.domain.name, ""])[:-3]
    endkey = "%s{" % startkey
    deid_count = SavedExportSchema.view(
        "couchexport/saved_export_schemas",
        startkey=startkey,
        endkey=endkey,
        include_docs=False,
    ).count()
    if deid_count > 0:
        message = ungettext(
            "You have %(num)d De-Identified Export. Selecting this "
            "plan will remove it.",
            "You have %(num)d De-Identified Exports. Selecting this "
            "plan will remove them.",
            deid_count) % {'num': deid_count}
        return self._fmt_alert(message)
def test_parent_case_conversion(self, _):
    """Parent-case index columns convert with the expected selection flags."""
    old_export = SavedExportSchema.wrap(self.get_json('parent_case'))
    with mock.patch(
            'corehq.apps.export.models.new.CaseExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance = convert_saved_export_to_export_instance(self.domain, old_export)

    table = instance.get_table(PARENT_CASE_TABLE)
    self.assertEqual(table.label, 'Parent Cases')
    self.assertTrue(table.selected)

    indices = PathNode(name='indices', is_repeat=True)
    expected_paths = [
        ([indices, PathNode(name='referenced_id')], True),
        ([indices, PathNode(name='referenced_type')], False),
        ([indices, PathNode(name='relationship')], True),
        ([indices, PathNode(name='doc_type')], True),
    ]
    for path, selected in expected_paths:
        idx, column = table.get_column(path, 'ExportItem', None)
        self.assertEqual(column.selected, selected,
                         '{} selected is not {}'.format(path, selected))
def test_single_node_repeats(self, _, __):
    """
    This test ensures that if a repeat only receives one entry, that the
    selection will still be migrated.
    """
    old_export = SavedExportSchema.wrap(self.get_json('single_node_repeat'))
    with mock.patch(
            'corehq.apps.export.models.new.FormExportDataSchema.generate_schema_from_builds',
            return_value=self.schema):
        instance, meta = convert_saved_export_to_export_instance(self.domain, old_export)

    repeat_path = [PathNode(name='form'), PathNode(name='repeat', is_repeat=True)]
    table = instance.get_table(repeat_path)
    idx, column = table.get_column(
        repeat_path + [PathNode(name='single_answer')], 'ExportItem', None)
    self.assertTrue(column.selected)
def migrate_domain(domain, dryrun=False, force_convert_columns=False):
    """Convert every saved export in ``domain`` to a new-style export instance.

    Returns the list of per-export migration metadata. Unless ``dryrun``,
    also flips the OLD_EXPORTS toggle off for the domain and, for remote-app
    migrations, enables user-defined export columns.
    """
    from couchexport.models import SavedExportSchema
    export_count = stale_get_export_count(domain)
    metas = []
    if export_count:
        for old_export in with_progress_bar(get_exports_json(domain), length=export_count, prefix=domain):
            # Lock per export id so a concurrent save cannot race the conversion.
            with CriticalSection(['saved-export-{}'.format(old_export['_id'])], timeout=120):
                try:
                    _, migration_meta = convert_saved_export_to_export_instance(
                        domain,
                        SavedExportSchema.get(old_export['_id']),
                        dryrun=dryrun,
                        force_convert_columns=force_convert_columns,
                    )
                except Exception as e:
                    # Report the failing export, then abort the whole run.
                    print('Failed parsing {}: {}'.format(old_export['_id'], e))
                    raise
                else:
                    metas.append(migration_meta)
    if not dryrun:
        # Switch the domain over to the new exports UI.
        set_toggle(OLD_EXPORTS.slug, domain, False, namespace=NAMESPACE_DOMAIN)
        toggle_js_domain_cachebuster.clear(domain)
    # Remote app migrations must have access to UserDefined columns and tables
    if any(map(lambda meta: meta.is_remote_app_migration, metas)):
        set_toggle(ALLOW_USER_DEFINED_EXPORT_COLUMNS.slug, domain, True, namespace=NAMESPACE_DOMAIN)
        toggle_js_domain_cachebuster.clear(domain)
    for meta in metas:
        # Only report exports where something was dropped during conversion.
        if not meta.skipped_tables and not meta.skipped_columns:
            continue
        output = '* Export information for export: {} *'.format(
            meta.old_export_url)
        schema_id_output = 'Generated schema: {}'.format(
            meta.generated_schema_id)
        # Print a star-bordered banner sized to the widest line; the
        # padding keeps the right-hand border aligned.
        print('')
        print('*' * len(output))
        print(output)
        print('* {}{} *'.format(
            schema_id_output,
            ' ' * (len(output) - len(schema_id_output) - 4)))
        print('*' * len(output))
        print('')
        if meta.skipped_tables:
            print('# Skipped tables #')
            for table_meta in meta.skipped_tables:
                table_meta.pretty_print()
        if meta.skipped_columns:
            print('# Skipped columns #')
            for column_meta in meta.skipped_columns:
                column_meta.pretty_print()
    return metas
def tearDownClass(cls):
    """Remove every SavedExportSchema document created by these tests."""
    doc_types = (SavedExportSchema.__name__, )
    delete_all_docs_by_doc_type(SavedExportSchema.get_db(), doc_types)
    super(SavedExportSchemaDBTest, cls).tearDownClass()
def setUpClass(cls):
    """Seed the couch db with three saved export schema fixtures."""
    super(SavedExportSchemaDBTest, cls).setUpClass()
    # NOTE(review): all three docs use domain='domain1' while two indexes
    # point at 'domain2' — presumably intentional fixture data; confirm.
    for index in (["domain1", "blah"], ["domain2", "blah"], ["domain2", "blah"]):
        SavedExportSchema(domain='domain1', index=index).save()
def exports(self):
    """The SavedExportSchema documents referenced by ``self.export_ids``."""
    return list(map(SavedExportSchema.get, self.export_ids))
def exports(self):
    """Fetch each referenced SavedExportSchema document."""
    fetched = []
    for export_id in self.export_ids:
        fetched.append(SavedExportSchema.get(export_id))
    return fetched
def tearDownClass(cls):
    """Delete all SavedExportSchema docs, then run the parent teardown."""
    db = SavedExportSchema.get_db()
    delete_all_docs_by_doc_type(db, (SavedExportSchema.__name__,))
    super(SavedExportSchemaDBTest, cls).tearDownClass()