def _delete_all_cases(domain_name):
    """Soft-delete every case in ``domain_name``, working in chunks of 500."""
    logger.info('Deleting cases...')
    accessor = CaseAccessors(domain_name)
    all_ids = accessor.get_case_ids_in_domain()
    # Progress output is silenced while tests run.
    progress = with_progress_bar(all_ids, stream=silence_during_tests())
    for id_chunk in chunked(progress, 500):
        accessor.soft_delete_cases(list(id_chunk))
    logger.info('Deleting cases complete.')
def get_duplicated_case_stubs(domain, case_type):
    # Build lightweight stubs (case_id, human-readable ID, opened_on) for
    # every case of `case_type` in `domain`, for duplicate-ID detection.
    accessor = CaseAccessors(domain)
    # Map the case type slug to the case property holding its readable ID.
    id_property = {'voucher': VOUCHER_ID, 'person': ENIKSHAY_ID}[case_type]
    all_case_ids = accessor.get_case_ids_in_domain(case_type)
    # NOTE(review): `all_cases` is built but not returned or used in the
    # visible span — presumably duplicate detection continues below this
    # excerpt; confirm against the full source.
    all_cases = [{
        'case_id': case.case_id,
        'readable_id': case.get_case_property(id_property),
        'opened_on': case.opened_on,
    } for case in accessor.iter_cases(all_case_ids)]
def test_cases_created(self):
    """All 18 cases exist and every food case has a foodrecall parent."""
    case_accessor = CaseAccessors(DOMAIN)
    ids = case_accessor.get_case_ids_in_domain()
    all_cases = list(case_accessor.get_cases(ids))
    self.assertEqual(len(all_cases), 18)
    food_cases = (c for c in all_cases if c.type == FOOD_CASE_TYPE)
    self.assertTrue(all(c.parent.type == FOODRECALL_CASE_TYPE for c in food_cases))
def handle(self, domain, log_path, log_errors, **options):
    """Migrate diagnosis-test details onto episode cases in `domain`.

    Writes one CSV row per applied update to `log_path`, and episode IDs
    whose test lookup failed to `log_errors`.  Only writes case updates
    back when the --commit option is set; otherwise it is a dry run.
    """
    commit = options['commit']
    accessor = CaseAccessors(domain)
    factory = CaseFactory(domain)
    # Column order doubles as the order of values written per row below.
    headers = [
        'case_id',
        'diagnosis_test_result_date',
        'diagnosis_lab_facility_name',
        'diagnosis_test_lab_serial_number',
        'diagnosis_test_summary',
        'datamigration_diagnosis_test_information',
    ]
    print("Starting {} migration on {} at {}".format(
        "real" if commit else "fake", domain, datetime.datetime.utcnow()))
    with open(log_errors, 'w') as log_errors_file:
        error_logger = csv.writer(log_errors_file)
        error_logger.writerow(['case_id'])
        with open(log_path, "w") as log_file:
            writer = csv.writer(log_file)
            writer.writerow(headers)
            for episode_case_id in accessor.get_case_ids_in_domain(
                    type='episode'):
                print('Looking at {}'.format(episode_case_id))
                episode_case = accessor.get_case(episode_case_id)
                case_properties = episode_case.dynamic_case_properties()
                if self.should_migrate_case(episode_case_id, case_properties,
                                            domain):
                    test = self.get_relevant_test_case(
                        domain, episode_case, error_logger)
                    if test is not None:
                        update = self.get_updates(test)
                        print('Updating {}...'.format(episode_case_id))
                        # Log the row whether or not we commit, so a dry
                        # run produces the same audit trail as a real one.
                        writer.writerow(
                            [episode_case_id]
                            + [update[key] for key in headers[1:]])
                        if commit:
                            try:
                                factory.update_case(
                                    case_id=episode_case_id, update=update)
                            except NikshayLocationNotFound:
                                # Best-effort: skip episodes whose Nikshay
                                # location cannot be resolved.
                                pass
                    else:
                        print('No relevant test found for episode {}'.format(
                            episode_case_id))
                else:
                    print('Do not migrate {}'.format(episode_case_id))
    print('Migration complete at {}'.format(datetime.datetime.utcnow()))
class ExplodeLedgersTest(BaseSyncTest):
    """Tests that explode_cases copies ledgers only for cases that had them."""

    def setUp(self):
        super(ExplodeLedgersTest, self).setUp()
        self.case_accessor = CaseAccessors(self.project.name)
        self.ledger_accessor = LedgerAccessors(self.project.name)
        self._create_ledgers()

    def tearDown(self):
        # Clean up everything created by _create_ledgers / explode_cases.
        delete_all_ledgers()
        delete_all_cases()
        delete_all_xforms()
        super(ExplodeLedgersTest, self).tearDown()

    def _create_ledgers(self):
        # Two cases of the same type; ledgers are attached to case1 only.
        case_type = 'case'
        case1 = CaseStructure(
            case_id='case1',
            attrs={'create': True, 'case_type': case_type},
        )
        case2 = CaseStructure(
            case_id='case2',
            attrs={'create': True, 'case_type': case_type},
        )
        # case2 will have no ledgers
        self.ledgers = {
            'icecream': Balance(
                entity_id=case1.case_id,
                date=datetime(2017, 11, 21, 0, 0, 0, 0),
                section_id='test',
                entry=Entry(id='icecream', quantity=4),
            ),
            'blondie': Balance(
                entity_id=case1.case_id,
                date=datetime(2017, 11, 21, 0, 0, 0, 0),
                section_id='test',
                entry=Entry(id='blondie', quantity=5),
            )
        }
        self.device.post_changes([case1, case2])
        self.device.post_changes(list(self.ledgers.values()))

    def test_explode_ledgers(self):
        # Explode into 5 copies, then verify ledger values per resulting case.
        explode_cases(self.project.name, self.user_id, 5)
        cases = self.case_accessor.iter_cases(self.case_accessor.get_case_ids_in_domain())
        for case in cases:
            ledger_values = {l.entry_id: l for l in self.ledger_accessor.get_ledger_values_for_case(case.case_id)}
            if case.case_id == 'case2' or case.get_case_property('cc_exploded_from') == 'case2':
                # case2 and its clones had no ledgers to copy.
                self.assertEqual(len(ledger_values), 0)
            else:
                # case1 and its clones carry all the original balances.
                self.assertEqual(len(ledger_values), len(self.ledgers))
                for id, balance in six.iteritems(self.ledgers):
                    self.assertEqual(ledger_values[id].balance, balance.entry.quantity)
                    self.assertEqual(ledger_values[id].entry_id, balance.entry.id)
def update_cases(self, domain, case_type, user_id):
    """Scan all `case_type` cases in `domain`, collecting update case blocks."""
    accessor = CaseAccessors(domain)
    case_ids = accessor.get_case_ids_in_domain(case_type)
    print(f"Found {len(case_ids)} {case_type} cases in {domain}")
    skip_count = 0
    case_blocks = []
    for candidate in accessor.iter_cases(case_ids):
        if should_skip(candidate):
            skip_count += 1
            continue
        if needs_update(candidate):
            case_blocks.append(self.case_block(candidate))
def update_cases(domain):
    """Scan all CASE_TYPE cases in `domain`, collecting update case blocks."""
    accessor = CaseAccessors(domain)
    case_ids = accessor.get_case_ids_in_domain(CASE_TYPE)
    print(f"Found {len(case_ids)} {CASE_TYPE} cases in {domain}")
    skip_count = 0
    case_blocks = []
    for candidate in accessor.iter_cases(case_ids):
        if should_skip(candidate):
            skip_count += 1
            continue
        if needs_update(candidate):
            case_blocks.append(case_block(candidate))
def duplicate_ids_report(request, domain, case_type):
    """Build stub info for all cases of the requested type for duplicate-ID review."""
    # Keep the original URL slug: the id_property mapping below is keyed on
    # the slug ('voucher'/'person'), not on the mapped case-type constant.
    # The original code indexed it with the already-remapped `case_type`,
    # which only worked because the constants happen to equal the slugs.
    slug = case_type
    case_type = {
        'voucher': CASE_TYPE_VOUCHER,
        'person': CASE_TYPE_PERSON,
    }[slug]
    id_property = {'voucher': VOUCHER_ID, 'person': ENIKSHAY_ID}[slug]
    accessor = CaseAccessors(domain)
    case_ids = accessor.get_case_ids_in_domain(case_type)
    # NOTE(review): `all_cases` is unused in the visible span; presumably the
    # report continues below this excerpt.
    all_cases = [{
        'case_id': case.case_id,
        'readable_id': case.get_case_property(id_property),
        'opened_on': case.opened_on,
    } for case in accessor.iter_cases(case_ids)]
def get_case_blocks() -> Iterable[CaseBlock]:
    """Yield a CaseBlock for each CASE_TYPE case that has an external_id."""
    accessors = CaseAccessors(DOMAIN)
    for cid in accessors.get_case_ids_in_domain(type=CASE_TYPE):
        case = accessors.get_case(cid)
        if case.external_id:
            yield CaseBlock(
                case_id=case.case_id,
                external_id=case.external_id,
                case_type=CASE_TYPE,
                case_name=case.name,
                update={},
            )
        # else: this case is not mapped to a facility in DHIS2 — skip it.
def update_cases(self, domain, case_type, user_id):
    """Collect update case blocks for `case_type` cases, honoring location skip rules."""
    accessor = CaseAccessors(domain)
    case_ids = accessor.get_case_ids_in_domain(case_type)
    print(f"Found {len(case_ids)} {case_type} cases in {domain}")
    inactive_location = self.extra_options['inactive_location']
    traveler_location_id = self.extra_options['location']
    skip_count = 0
    case_blocks = []
    for candidate in accessor.iter_cases(case_ids):
        if should_skip(candidate, traveler_location_id, inactive_location):
            skip_count += 1
            continue
        if needs_update(candidate):
            case_blocks.append(self.case_block(candidate, get_owner_id(case_type)))
def handle(self, domain, case_ids, **options):
    """Update 'test' cases (all in the domain, or only the given IDs).

    Collects any missing Nikshay codes encountered along the way and
    prints them at the end.
    """
    self.domain = domain
    self.missing_nikshay_codes = set()
    case_accessor = CaseAccessors(domain)
    if not case_ids:
        # No explicit IDs given: process every 'test' case in the domain.
        case_ids = case_accessor.get_case_ids_in_domain(type='test')
    for case_id in case_ids:
        test_case = case_accessor.get_case(case_id)
        case_properties = test_case.dynamic_case_properties()
        if self.should_update(case_properties):
            self.update_case(case_id, case_properties)
            # NOTE(review): the collapsed source is ambiguous here; this
            # reading prints only IDs that were updated — confirm against
            # the original formatting.
            print(case_id)
    print(self.missing_nikshay_codes)
def handle(self, domain, episode_case_ids, **options):
    """Run the facility-ID migration over the given episodes (or all of them)."""
    accessor = CaseAccessors(domain)
    if not episode_case_ids:
        episode_case_ids = accessor.get_case_ids_in_domain(type='episode')
    for case_id in episode_case_ids:
        print(case_id)
        episode = accessor.get_case(case_id)
        try:
            updater = EpisodeFacilityIDMigration(domain, episode)
        except ENikshayCaseNotFound:
            # Episode is missing required related cases — skip it.
            continue
        update_json = updater.update_json()
        if update_json:
            update_case(domain, case_id, update_json)
def update_cases(domain, case_type, username):
    """Collect update case blocks for `case_type` cases, attributed to `username`."""
    accessor = CaseAccessors(domain)
    case_ids = accessor.get_case_ids_in_domain(case_type)
    print(f"Found {len(case_ids)} {case_type} cases in {domain}")
    # Fall back to the system user when the username doesn't resolve.
    user_id = username_to_user_id(username) or SYSTEM_USER_ID
    skip_count = 0
    case_blocks = []
    for candidate in accessor.iter_cases(case_ids):
        if should_skip(candidate):
            skip_count += 1
            continue
        if needs_update(candidate):
            case_blocks.append(case_block(candidate))
def get_expected_rows(self):
    """Return expected report rows with fixture external IDs swapped for real case IDs."""
    accessor = CaseAccessors(DOMAIN)
    all_ids = accessor.get_case_ids_in_domain()
    real_id_for = {c.external_id: c.case_id for c in accessor.get_cases(all_ids)}

    def swap(row):
        # Values that aren't known external IDs pass through unchanged.
        return {key: real_id_for.get(val, val) for key, val in row.items()}

    return map(swap, get_expected_report())
def _overwrite_report(filename, actual_report):
    """For use when making changes - force overwrites test data

    Rewrites the CSV fixture under data/<filename>, replacing real case IDs
    in the report rows with their external IDs so the fixture stays stable
    across test runs.
    """
    accessor = CaseAccessors(DOMAIN)
    case_ids = accessor.get_case_ids_in_domain()
    external_ids_by_case_id = {
        c.case_id: c.external_id for c in accessor.get_cases(case_ids)
    }
    rows = [
        # Swap any value that is a known case ID for its external ID.
        [external_ids_by_case_id.get(val, val) for val in row]
        for row in actual_report.rows
    ]
    path = os.path.join(os.path.dirname(__file__), 'data', filename)
    # newline='' is required when handing a file to csv.writer (csv docs);
    # without it, extra blank rows appear on platforms using \r\n endings.
    with open(path, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(actual_report.headers)
        writer.writerows(rows)
class ReadonlyCaseDocumentStore(ReadOnlyDocumentStore):
    """Read-only DocumentStore over the cases of a single domain."""

    def __init__(self, domain):
        self.domain = domain
        self.case_accessors = CaseAccessors(domain=domain)

    def get_document(self, doc_id):
        """Return the case as JSON; raise DocumentNotFoundError if absent."""
        try:
            case = self.case_accessors.get_case(doc_id)
        except CaseNotFound as err:
            raise DocumentNotFoundError(err)
        return case.to_json()

    def iter_document_ids(self, last_id=None):
        # todo: support last_id
        return iter(self.case_accessors.get_case_ids_in_domain())

    def iter_documents(self, ids):
        """Yield the JSON form of each case in `ids`."""
        return (case.to_json() for case in self.case_accessors.iter_cases(ids))
def _update_case_id_properties(domain, user):
    """Some case properties store the ID of related cases. This updates those IDs"""
    accessor = CaseAccessors(domain)
    all_cases = list(accessor.get_cases(accessor.get_case_ids_in_domain()))
    real_id_by_external_id = {c.external_id: c.case_id for c in all_cases}
    case_blocks = []
    for case in all_cases:
        # Replace any property value that matches a known external ID with
        # the corresponding real case ID.
        update = {
            prop: real_id_by_external_id[value]
            for prop, value in case.dynamic_case_properties().items()
            if value in real_id_by_external_id
        }
        if update:
            case_blocks.append(
                CaseBlock(
                    case_id=case.case_id,
                    user_id=user._id,
                    update=update,
                ).as_xml()
            )
def handle(self, domain, **options):
    """Dump all paid/rejected vouchers (with their person case) to voucher_statuses.csv."""
    accessor = CaseAccessors(domain)
    voucher_ids = accessor.get_case_ids_in_domain(CASE_TYPE_VOUCHER)
    rows = [['voucher_id', 'state', 'comments', 'person_id', 'person_name']]
    for voucher in with_progress_bar(accessor.iter_cases(voucher_ids), len(voucher_ids)):
        if voucher.get_case_property('state') in ('paid', 'rejected'):
            person = get_person_case_from_voucher(domain, voucher.case_id)
            rows.append([
                voucher.case_id,
                voucher.get_case_property('state'),
                voucher.get_case_property('comments'),
                person.case_id,
                "{} {}".format(person.get_case_property(PERSON_FIRST_NAME),
                               person.get_case_property(PERSON_LAST_NAME)),
            ])
    filename = 'voucher_statuses.csv'
    # newline='' is required when handing a file to csv.writer (csv docs);
    # without it, extra blank rows appear on platforms using \r\n endings.
    with open(filename, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerows(rows)
    print('{} cases have a status of paid or rejected. Details written to {}'
          .format(len(rows) - 1, filename))
def handle(self, domain, log_file_name, case_ids, **options):
    """Generic case-property migration: log current vs. new values to CSV,
    and commit the updates when --commit is set.

    Subclasses supply case_type, case_properties_to_update,
    datamigration_case_property, is_valid_case, get_case_property_updates
    and commit_updates.
    """
    commit = options['commit']
    print("Starting {} migration on {} at {}".format(
        "real" if commit else "fake", domain, datetime.datetime.utcnow()))
    accessor = CaseAccessors(domain)
    # Explicit case IDs override the full-domain scan.
    case_ids = case_ids or accessor.get_case_ids_in_domain(
        type=self.case_type)
    # NOTE(review): file handed to csv.writer should be opened with
    # newline='' per the csv docs — confirm and fix separately.
    with open(log_file_name, "w") as log_file:
        writer = csv.writer(log_file)
        # Header: case_id, the current value of each property, then the new
        # value of each property, then the datamigration marker property.
        writer.writerow(['case_id'] + [
            'current_' + case_prop
            for case_prop in self.case_properties_to_update
        ] + self.case_properties_to_update + [self.datamigration_case_property])
        for case_id in with_progress_bar(case_ids):
            if self.is_valid_case(domain, case_id):
                case = accessor.get_case(case_id)
                updated_case_properties = self.get_case_property_updates(
                    case, domain)
                needs_update = bool(updated_case_properties)
                # Always stamp the marker property so re-runs can tell which
                # cases were already examined.
                updated_case_properties[
                    self.
                    datamigration_case_property] = 'yes' if needs_update else 'no'
                writer.writerow([case.case_id] + [
                    case.get_case_property(case_prop) or ''
                    for case_prop in self.case_properties_to_update
                ] + [
                    updated_case_properties.get(case_prop, '')
                    for case_prop in (self.case_properties_to_update +
                                      [self.datamigration_case_property])
                ])
                if needs_update and commit:
                    self.commit_updates(domain, case.case_id,
                                        updated_case_properties)
    print("Finished at {}".format(datetime.datetime.utcnow()))
class CaseDocumentStore(DocumentStore):
    """DocumentStore over a domain's cases, optionally limited to one case type."""

    def __init__(self, domain, case_type=None):
        self.domain = domain
        self.case_accessors = CaseAccessors(domain=domain)
        self.case_type = case_type

    def get_document(self, doc_id):
        """Return the case as JSON; raise DocumentNotFoundError if absent."""
        try:
            case = self.case_accessors.get_case(doc_id)
        except CaseNotFound as err:
            raise DocumentNotFoundError(err)
        return case.to_json()

    def iter_document_ids(self):
        if should_use_sql_backend(self.domain):
            # SQL backend: stream IDs through the reindex accessor.
            reindex_accessor = CaseReindexAccessor(self.domain, case_type=self.case_type)
            return iter_all_ids(reindex_accessor)
        # Couch backend: fetch the full ID list and wrap it in an iterator.
        return iter(self.case_accessors.get_case_ids_in_domain(self.case_type))

    def iter_documents(self, ids):
        """Yield the JSON form of each case in `ids`."""
        return (case.to_json() for case in self.case_accessors.iter_cases(ids))
class CaseAPIHelper(object):
    """
    Simple config object for querying the APIs
    """
    def __init__(self, domain, status=CASE_STATUS_OPEN, case_type=None,
                 ids_only=False, footprint=False, strip_history=False,
                 filters=None):
        if status not in [CASE_STATUS_ALL, CASE_STATUS_CLOSED, CASE_STATUS_OPEN]:
            raise ValueError("invalid case status %s" % status)
        self.domain = domain
        self.status = status
        self.case_type = case_type
        self.ids_only = ids_only
        # if we're just querying IDs we don't need to wrap the docs
        self.wrap = not ids_only
        self.footprint = footprint
        self.strip_history = strip_history
        self.filters = filters
        self.case_accessors = CaseAccessors(self.domain)

    def _case_results(self, case_id_list):
        # Turn a list of case IDs into CaseAPIResult objects, applying
        # property filters and/or footprint (dependent-case) expansion.
        def _filter(res):
            # A result passes when every filter path matches its value;
            # 'null'/'true'/'false' filter values are compared as JSON.
            if self.filters:
                for path, val in self.filters.items():
                    actual_val = safe_index(res.case_json, path.split("/"))
                    if actual_val != val:
                        # closed=false => case.closed == False
                        if val in ('null', 'true', 'false'):
                            if actual_val != json.loads(val):
                                return False
                        else:
                            return False
            return True

        if self.filters and not self.footprint:
            # Filtering requires the full docs, so populate before filtering.
            base_results = self._populate_results(case_id_list)
            return filter(_filter, base_results)

        if self.footprint:
            # Expand to include all cases the initial set depends on.
            initial_case_ids = set(case_id_list)
            dependent_case_ids = get_dependent_case_info(
                self.domain, initial_case_ids).all_ids
            all_case_ids = initial_case_ids | dependent_case_ids
        else:
            all_case_ids = case_id_list

        if self.ids_only:
            return [CaseAPIResult(domain=self.domain, id=case_id, id_only=True)
                    for case_id in all_case_ids]
        else:
            return self._populate_results(all_case_ids)

    def _populate_results(self, case_id_list):
        # Load full case docs; the SQL backend goes through CaseAccessors,
        # the Couch backend through the legacy iter_cases helper.
        if should_use_sql_backend(self.domain):
            base_results = [CaseAPIResult(domain=self.domain, couch_doc=case,
                                          id_only=self.ids_only)
                            for case in self.case_accessors.iter_cases(case_id_list)]
        else:
            base_results = [CaseAPIResult(domain=self.domain, couch_doc=case,
                                          id_only=self.ids_only)
                            for case in iter_cases(case_id_list,
                                                   self.strip_history, self.wrap)]
        return base_results

    def get_all(self):
        # Note: CASE_STATUS_CLOSED is rejected here even though __init__
        # accepts it.
        status = self.status or CASE_STATUS_ALL
        if status == CASE_STATUS_ALL:
            case_ids = self.case_accessors.get_case_ids_in_domain(self.case_type)
        elif status == CASE_STATUS_OPEN:
            case_ids = self.case_accessors.get_open_case_ids_in_domain_by_type(self.case_type)
        else:
            raise ValueError("Invalid value for 'status': '%s'" % status)
        return self._case_results(case_ids)

    def get_owned(self, user_id):
        # Resolve the user's owner IDs (user + its case-sharing groups);
        # fall back to the bare user_id if the user can't be loaded.
        try:
            user = CouchUser.get_by_user_id(user_id, self.domain)
        except KeyError:
            user = None
        try:
            owner_ids = user.get_owner_ids()
        except AttributeError:
            # user is None (or has no owner IDs) — query by the raw ID.
            owner_ids = [user_id]
        closed = {
            CASE_STATUS_OPEN: False,
            CASE_STATUS_CLOSED: True,
            CASE_STATUS_ALL: None,
        }[self.status]
        ids = self.case_accessors.get_case_ids_by_owners(owner_ids, closed=closed)
        return self._case_results(ids)
class ImporterTest(TestCase):
    """End-to-end tests for the Excel case importer (do_import)."""

    def setUp(self):
        super(ImporterTest, self).setUp()
        self.domain_obj = create_domain("importer-test")
        self.domain = self.domain_obj.name
        self.default_case_type = 'importer-test-casetype'
        self.couch_user = WebUser.create(None, "test", "foobar", None, None)
        self.couch_user.add_domain_membership(self.domain, is_admin=True)
        self.couch_user.save()
        self.accessor = CaseAccessors(self.domain)
        self.factory = CaseFactory(domain=self.domain, case_defaults={
            'case_type': self.default_case_type,
        })
        delete_all_cases()

    def tearDown(self):
        self.couch_user.delete()
        self.domain_obj.delete()
        super(ImporterTest, self).tearDown()

    def _config(self, col_names, search_column=None, case_type=None,
                search_field='case_id', create_new_cases=True):
        """Build an ImporterConfig whose excel and custom fields are `col_names`."""
        return ImporterConfig(
            couch_user_id=self.couch_user._id,
            case_type=case_type or self.default_case_type,
            excel_fields=col_names,
            case_fields=[''] * len(col_names),
            custom_fields=col_names,
            search_column=search_column or col_names[0],
            search_field=search_field,
            create_new_cases=create_new_cases,
        )

    @run_with_all_backends
    @patch('corehq.apps.case_importer.tasks.bulk_import_async.update_state')
    def testImportFileMissing(self, update_state):
        # by using a made up upload_id, we ensure it's not referencing any real file
        case_upload = CaseUploadRecord(upload_id=str(uuid.uuid4()),
                                       task_id=str(uuid.uuid4()))
        case_upload.save()
        res = bulk_import_async.delay(self._config(['anything']), self.domain,
                                      case_upload.upload_id)
        self.assertIsInstance(res.result, Ignore)
        update_state.assert_called_with(
            state=states.FAILURE,
            meta=get_interned_exception(
                'Sorry, your session has expired. Please start over and try again.'))
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    @run_with_all_backends
    def testImportBasic(self):
        """5 new rows create 5 cases with unique property values."""
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
            ['case_id-4', 'age-4', 'sex-4', 'location-4'],
        )
        res = do_import(file, config, self.domain)
        self.assertEqual(5, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertFalse(res['errors'])
        self.assertEqual(1, res['num_chunks'])
        case_ids = self.accessor.get_case_ids_in_domain()
        cases = list(self.accessor.get_cases(case_ids))
        self.assertEqual(5, len(cases))
        properties_seen = set()
        for case in cases:
            self.assertEqual(self.couch_user._id, case.user_id)
            self.assertEqual(self.couch_user._id, case.owner_id)
            self.assertEqual(self.default_case_type, case.type)
            for prop in ['age', 'sex', 'location']:
                self.assertTrue(prop in case.get_case_property(prop))
                self.assertFalse(case.get_case_property(prop) in properties_seen)
                properties_seen.add(case.get_case_property(prop))

    @run_with_all_backends
    def testImportNamedColumns(self):
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
        )
        res = do_import(file, config, self.domain)
        self.assertEqual(4, res['created_count'])
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testImportTrailingWhitespace(self):
        # \xa0 (non-breaking space) in the column header should be stripped.
        cols = ['case_id', 'age', 'sex\xa0', 'location']
        config = self._config(cols)
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex\xa0', 'location'],
            ['case_id-0', 'age-0', 'sex\xa0-0', 'location-0'],
        )
        res = do_import(file, config, self.domain)
        self.assertEqual(1, res['created_count'])
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        case = self.accessor.get_case(case_ids[0])
        # make sure the value also got properly set
        self.assertTrue(bool(case.get_case_property('sex')))

    @run_with_all_backends
    def testCaseIdMatching(self):
        # bootstrap a stub case
        [case] = self.factory.create_or_update_case(CaseStructure(attrs={
            'create': True,
            'update': {'importer_test_prop': 'foo'},
        }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [case.case_id, 'age-0', 'sex-0', 'location-0'],
            [case.case_id, 'age-1', 'sex-1', 'location-1'],
            [case.case_id, 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(file, config, self.domain)
        self.assertEqual(0, res['created_count'])
        self.assertEqual(3, res['match_count'])
        self.assertFalse(res['errors'])
        # shouldn't create any more cases, just the one
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        [case] = self.accessor.get_cases(case_ids)
        for prop in ['age', 'sex', 'location']:
            self.assertTrue(prop in case.get_case_property(prop))
        # shouldn't touch existing properties
        self.assertEqual('foo', case.get_case_property('importer_test_prop'))

    @run_with_all_backends
    def testCaseLookupTypeCheck(self):
        [case] = self.factory.create_or_update_case(CaseStructure(attrs={
            'create': True,
            'case_type': 'nonmatch-type',
        }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [case.case_id, 'age-0', 'sex-0', 'location-0'],
            [case.case_id, 'age-1', 'sex-1', 'location-1'],
            [case.case_id, 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(file, config, self.domain)
        # because the type is wrong these shouldn't match
        self.assertEqual(3, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testCaseLookupDomainCheck(self):
        self.factory.domain = 'wrong-domain'
        [case] = self.factory.create_or_update_case(CaseStructure(attrs={
            'create': True,
        }))
        self.assertEqual(0, len(self.accessor.get_case_ids_in_domain()))
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [case.case_id, 'age-0', 'sex-0', 'location-0'],
            [case.case_id, 'age-1', 'sex-1', 'location-1'],
            [case.case_id, 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(file, config, self.domain)
        # because the domain is wrong these shouldn't match
        self.assertEqual(3, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(3, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def testExternalIdMatching(self):
        # bootstrap a stub case
        external_id = 'importer-test-external-id'
        [case] = self.factory.create_or_update_case(CaseStructure(attrs={
            'create': True,
            'external_id': external_id,
        }))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        headers = ['external_id', 'age', 'sex', 'location']
        config = self._config(headers, search_field='external_id')
        file = make_worksheet_wrapper(
            ['external_id', 'age', 'sex', 'location'],
            ['importer-test-external-id', 'age-0', 'sex-0', 'location-0'],
            ['importer-test-external-id', 'age-1', 'sex-1', 'location-1'],
            ['importer-test-external-id', 'age-2', 'sex-2', 'location-2'],
        )
        res = do_import(file, config, self.domain)
        self.assertEqual(0, res['created_count'])
        self.assertEqual(3, res['match_count'])
        self.assertFalse(res['errors'])
        # shouldn't create any more cases, just the one
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

    @run_with_all_backends
    def test_external_id_matching_on_create_with_custom_column_name(self):
        headers = ['id_column', 'age', 'sex', 'location']
        external_id = 'external-id-test'
        config = self._config(headers[1:], search_column='id_column',
                              search_field='external_id')
        file = make_worksheet_wrapper(
            ['id_column', 'age', 'sex', 'location'],
            ['external-id-test', 'age-0', 'sex-0', 'location-0'],
            ['external-id-test', 'age-1', 'sex-1', 'location-1'],
        )
        res = do_import(file, config, self.domain)
        self.assertFalse(res['errors'])
        self.assertEqual(1, res['created_count'])
        self.assertEqual(1, res['match_count'])
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        case = self.accessor.get_case(case_ids[0])
        self.assertEqual(external_id, case.external_id)

    def testNoCreateNew(self):
        config = self._config(['case_id', 'age', 'sex', 'location'],
                              create_new_cases=False)
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
            ['case_id-4', 'age-4', 'sex-4', 'location-4'],
        )
        res = do_import(file, config, self.domain)
        # no matching and no create new set - should do nothing
        self.assertEqual(0, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    def testBlankRows(self):
        # don't create new cases for rows left blank
        config = self._config(['case_id', 'age', 'sex', 'location'],
                              create_new_cases=True)
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            [None, None, None, None],
            ['', '', '', ''],
        )
        res = do_import(file, config, self.domain)
        # no matching and no create new set - should do nothing
        self.assertEqual(0, res['created_count'])
        self.assertEqual(0, res['match_count'])
        self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

    @patch('corehq.apps.case_importer.do_import.CASEBLOCK_CHUNKSIZE', 2)
    def testBasicChunking(self):
        config = self._config(['case_id', 'age', 'sex', 'location'])
        file = make_worksheet_wrapper(
            ['case_id', 'age', 'sex', 'location'],
            ['case_id-0', 'age-0', 'sex-0', 'location-0'],
            ['case_id-1', 'age-1', 'sex-1', 'location-1'],
            ['case_id-2', 'age-2', 'sex-2', 'location-2'],
            ['case_id-3', 'age-3', 'sex-3', 'location-3'],
            ['case_id-4', 'age-4', 'sex-4', 'location-4'],
        )
        res = do_import(file, config, self.domain)
        # 5 cases in chunks of 2 = 3 chunks
        self.assertEqual(3, res['num_chunks'])
        self.assertEqual(5, res['created_count'])
        self.assertEqual(5, len(get_case_ids_in_domain(self.domain)))

    @run_with_all_backends
    def testExternalIdChunking(self):
        # bootstrap a stub case
        external_id = 'importer-test-external-id'
        headers = ['external_id', 'age', 'sex', 'location']
        config = self._config(headers, search_field='external_id')
        file = make_worksheet_wrapper(
            ['external_id', 'age', 'sex', 'location'],
            ['importer-test-external-id', 'age-0', 'sex-0', 'location-0'],
            ['importer-test-external-id', 'age-1', 'sex-1', 'location-1'],
            ['importer-test-external-id', 'age-2', 'sex-2', 'location-2'],
        )
        # the first one should create the case, and the remaining two should update it
        res = do_import(file, config, self.domain)
        self.assertEqual(1, res['created_count'])
        self.assertEqual(2, res['match_count'])
        self.assertFalse(res['errors'])
        # the lookup causes an extra chunk
        self.assertEqual(2, res['num_chunks'])
        # should just create the one case
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        [case] = self.accessor.get_cases(case_ids)
        self.assertEqual(external_id, case.external_id)
        for prop in ['age', 'sex', 'location']:
            self.assertTrue(prop in case.get_case_property(prop))

    @run_with_all_backends
    def testParentCase(self):
        headers = ['parent_id', 'name', 'case_id']
        config = self._config(headers, create_new_cases=True,
                              search_column='case_id')
        rows = 3
        [parent_case] = self.factory.create_or_update_case(
            CaseStructure(attrs={'create': True}))
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        file = make_worksheet_wrapper(
            ['parent_id', 'name', 'case_id'],
            [parent_case.case_id, 'name-0', 'case_id-0'],
            [parent_case.case_id, 'name-1', 'case_id-1'],
            [parent_case.case_id, 'name-2', 'case_id-2'],
        )
        file_missing = make_worksheet_wrapper(
            ['parent_id', 'name', 'case_id'],
            ['parent_id-0', 'name-0', 'case_id-0'],
            ['parent_id-1', 'name-1', 'case_id-1'],
            ['parent_id-2', 'name-2', 'case_id-2'],
        )
        # Should successfully match on `rows` cases
        res = do_import(file, config, self.domain)
        self.assertEqual(rows, res['created_count'])
        # Should be unable to find parent case on `rows` cases
        res = do_import(file_missing, config, self.domain)
        error_column_name = 'parent_id'
        self.assertEqual(
            rows,
            len(res['errors'][exceptions.InvalidParentId.title]
                [error_column_name]['rows']),
            "All cases should have missing parent")

    def import_mock_file(self, rows):
        """Import `rows` (first row is the header) and return the result dict."""
        config = self._config(rows[0])
        xls_file = make_worksheet_wrapper(*rows)
        return do_import(xls_file, config, self.domain)

    @run_with_all_backends
    def testLocationOwner(self):
        # This is actually testing several different things, but I figure it's
        # worth it, as each of these tests takes a non-trivial amount of time.
        non_case_sharing = LocationType.objects.create(
            domain=self.domain, name='lt1', shares_cases=False)
        case_sharing = LocationType.objects.create(
            domain=self.domain, name='lt2', shares_cases=True)
        location = make_loc('loc-1', 'Loc 1', self.domain, case_sharing.code)
        make_loc('loc-2', 'Loc 2', self.domain, case_sharing.code)
        duplicate_loc = make_loc('loc-3', 'Loc 2', self.domain, case_sharing.code)
        improper_loc = make_loc('loc-4', 'Loc 4', self.domain,
                                non_case_sharing.code)
        res = self.import_mock_file([
            ['case_id', 'name', 'owner_id', 'owner_name'],
            ['', 'location-owner-id', location.group_id, ''],
            ['', 'location-owner-code', '', location.site_code],
            ['', 'location-owner-name', '', location.name],
            ['', 'duplicate-location-name', '', duplicate_loc.name],
            ['', 'non-case-owning-name', '', improper_loc.name],
        ])
        case_ids = self.accessor.get_case_ids_in_domain()
        cases = {c.name: c for c in list(self.accessor.get_cases(case_ids))}
        self.assertEqual(cases['location-owner-id'].owner_id, location.group_id)
        self.assertEqual(cases['location-owner-code'].owner_id, location.group_id)
        self.assertEqual(cases['location-owner-name'].owner_id, location.group_id)
        error_message = exceptions.DuplicateLocationName.title
        error_column_name = None
        self.assertIn(error_message, res['errors'])
        self.assertEqual(
            res['errors'][error_message][error_column_name]['rows'], [5])
        error_message = exceptions.InvalidOwner.title
        self.assertIn(error_message, res['errors'])
        error_column_name = 'owner_name'
        self.assertEqual(
            res['errors'][error_message][error_column_name]['rows'], [6])

    @run_with_all_backends
    def test_opened_on(self):
        case = self.factory.create_case()
        new_date = '2015-04-30T14:41:53.000000Z'
        with flag_enabled('BULK_UPLOAD_DATE_OPENED'):
            self.import_mock_file([
                ['case_id', 'date_opened'],
                [case.case_id, new_date]
            ])
        case = CaseAccessors(self.domain).get_case(case.case_id)
        self.assertEqual(case.opened_on, PhoneTime(parse_datetime(new_date)).done())
class ENikshay2BMigrator(object):
    """Migrates eNikshay case properties to the "2B" schema.

    Cases are processed one person at a time: all of a person's related
    cases (occurrences, episodes, tests, referrals, trails, drtb-hiv) are
    fetched in bulk and grouped into PersonCaseSet objects.
    """

    def __init__(self, domain, commit):
        self.domain = domain
        # When False, the migration is a dry run — presumably enforced by
        # methods outside this view; verify before relying on it.
        self.commit = commit
        self.accessor = CaseAccessors(self.domain)
        self.factory = CaseFactory(self.domain)
        # Running totals of migrated cases, by type.
        self.total_persons = 0
        self.total_occurrences = 0
        self.total_episodes = 0
        self.total_tests = 0
        self.total_referrals = 0
        self.total_trails = 0
        self.total_secondary_owners = 0
        self.total_drtb_hiv = 0

    @property
    @memoized
    def locations(self):
        # location_id -> SQLLocation, for the whole domain (location_type
        # prefetched so .location_type.code doesn't hit the DB per lookup).
        return {
            loc.location_id: loc
            for loc in SQLLocation.objects.filter(
                domain=self.domain).prefetch_related('location_type')
        }

    @property
    @memoized
    def location_ids_by_pk(self):
        # SQL primary key -> location_id, for resolving parent_id pointers.
        return {loc.pk: loc.location_id for loc in self.locations.values()}

    def get_ancestors_by_type(self, location):
        """Get all direct ancestors found in self.locations"""
        # Maps location-type code (e.g. 'phi', 'tu', 'dto') to the ancestor
        # location of that type, including `location` itself.
        ancestors_by_type = {location.location_type.code: location}
        loc = location
        # Walk up the tree until the parent is missing or outside the domain.
        while loc.parent_id and loc.parent_id in self.location_ids_by_pk:
            parent = self.locations[self.location_ids_by_pk[loc.parent_id]]
            ancestors_by_type[parent.location_type.code] = parent
            loc = parent
        return ancestors_by_type

    def migrate(self):
        """Entry point: migrate every relevant person case set in the domain."""
        person_ids = self.get_relevant_person_case_ids()
        persons = self.get_relevant_person_case_sets(person_ids)
        for person in with_progress_bar(persons, len(person_ids)):
            self.migrate_person_case_set(person)

    def get_relevant_person_case_ids(self):
        # All person-type case ids in the domain; filtering happens later.
        return self.accessor.get_case_ids_in_domain(CASE_TYPE_PERSON)

    def get_relevant_person_case_sets(self, person_ids):
        """
        Generator returning all relevant cases for the migration, grouped by person.

        This is a pretty nasty method, but it was the only way I could figure
        out how to group the queries together, rather than performing multiple
        queries per person case.
        """
        for person_chunk in chunked(person_ids, 100):
            person_chunk = list(filter(None, person_chunk))
            all_persons = {}  # case_id: PersonCaseSet
            for person in self.accessor.get_cases(person_chunk):
                # enrolled_in_private is blank/not set AND case_version is blank/not set
                # AND owner_id is within the location set being migrated
                if (person.get_case_property(ENROLLED_IN_PRIVATE) != 'true'
                        and not person.get_case_property(CASE_VERSION)):
                    all_persons[person.case_id] = PersonCaseSet(person)

            # Pass 1: children/extensions of the person cases themselves.
            referrals_and_occurrences_to_person = {}
            type_to_bucket = {
                CASE_TYPE_OCCURRENCE: 'occurrences',
                CASE_TYPE_REFERRAL: 'referrals',
                CASE_TYPE_TRAIL: 'trails',
            }
            for case in self.accessor.get_reverse_indexed_cases(
                    [person_id for person_id in all_persons]):
                bucket = type_to_bucket.get(case.type, None)
                if bucket:
                    for index in case.indices:
                        if index.referenced_id in all_persons:
                            getattr(all_persons[index.referenced_id],
                                    bucket).append(case)
                            # Trails are terminal; the others are needed to
                            # resolve the next level of indices.
                            if bucket != 'trails':
                                referrals_and_occurrences_to_person[
                                    case.case_id] = index.referenced_id
                            break

            # Pass 2: cases indexed against occurrences/referrals.
            type_to_bucket = {
                CASE_TYPE_EPISODE: 'episodes',
                CASE_TYPE_TEST: 'tests',
                CASE_TYPE_TRAIL: 'trails',
            }
            episodes_to_person = {}
            for case in self.accessor.get_reverse_indexed_cases(
                    referrals_and_occurrences_to_person.keys()):
                bucket = type_to_bucket.get(case.type, None)
                if bucket:
                    for index in case.indices:
                        person_id = referrals_and_occurrences_to_person.get(
                            index.referenced_id)
                        if person_id:
                            getattr(all_persons[person_id], bucket).append(case)
                            if case.type == CASE_TYPE_EPISODE:
                                episodes_to_person[case.case_id] = person_id
                            break

            # Pass 3: drtb-hiv referrals indexed against episodes.
            for case in self.accessor.get_reverse_indexed_cases(
                    episodes_to_person.keys()):
                if case.type == CASE_TYPE_DRTB_HIV_REFERRAL:
                    for index in case.indices:
                        person_id = episodes_to_person.get(index.referenced_id)
                        if person_id:
                            all_persons[person_id].drtb_hiv.append(case)
                            break

            for person in all_persons.values():
                if person.occurrences:
                    # Most recently opened occurrence wins; ties fall back to
                    # comparing the case objects themselves.
                    person.latest_occurrence = max(
                        (case.opened_on, case)
                        for case in person.occurrences)[1]
                yield person
    def test(self):
        """End-to-end check of the 2B migration on the fixture case set:
        verifies the grouping helpers, runs the migration, then asserts the
        migrated properties/indices on every case type."""
        migrator = ENikshay2BMigrator(self.domain, commit=True)
        # first check some utils
        person_case_ids = migrator.get_relevant_person_case_ids()
        person_case_sets = list(
            migrator.get_relevant_person_case_sets(person_case_ids))
        self.assertEqual(1, len(person_case_sets))
        person = person_case_sets[0]
        self.assertEqual('roland-deschain', person.person.case_id)
        self.assertItemsEqual(['roland-deschain-occurrence'],
                              [c.case_id for c in person.occurrences])
        self.assertItemsEqual(['roland-deschain-occurrence-episode'],
                              [c.case_id for c in person.episodes])
        self.assertItemsEqual(['roland-deschain-occurrence-test'],
                              [c.case_id for c in person.tests])
        self.assertItemsEqual(['roland-deschain-referral'],
                              [c.case_id for c in person.referrals])
        self.assertItemsEqual(['roland-deschain-referral-trail'],
                              [c.case_id for c in person.trails])
        self.assertItemsEqual(
            ['roland-deschain-occurrence-episode-drtb_hiv_referral'],
            [c.case_id for c in person.drtb_hiv])

        # run the actual migration
        migrator.migrate()

        # check the results
        accessor = CaseAccessors(self.domain)
        new_person = accessor.get_case(person.person.case_id)
        self.assertDictContainsSubset(
            {
                'area': 'phi_area',
                'referred_outside_enikshay_date': 'date_referred_out',
                'referred_outside_enikshay_by_id': 'referred_by_id',
                'contact_phone_number': '911234567890',
                'current_episode_type': "confirmed_tb",
                'alcohol_history': "alcohol_history",
                'alcohol_deaddiction': "alcohol_deaddiction",
                'tobacco_user': "******",
                'occupation': "occupation",
                'phone_number_other': "phone_number_other",
                'phi_name': 'PHI',
                'tu_name': 'TU',
                'tu_id': self.locations['TU'].location_id,
                'dto_name': 'DTO',
                'dto_id': self.locations['DTO'].location_id,
                'dataset': 'real',
                'updated_by_migration': 'enikshay_2b_case_properties',
            },
            new_person.dynamic_case_properties())

        new_occurrence = accessor.get_case(person.occurrences[0].case_id)
        self.assertDictContainsSubset(
            {
                'current_episode_type': 'confirmed_tb',
                'disease_classification': 'disease_classification',
                'site_choice': 'site_choice',
                'site_detail': 'site_detail',
                'key_population_status': 'key_population_status',
                'key_populations': 'key_populations',
            },
            new_occurrence.dynamic_case_properties())

        new_episode = accessor.get_case(person.episodes[0].case_id)
        self.assertDictContainsSubset(
            {
                'treatment_status': 'initiated_second_line_treatment',
                'date_of_diagnosis': 'date_reported',
                'dosage_display': 'full_dosage',
                'dosage_summary': 'full_dosage',
                'rft_general': 'diagnosis_dstb',
                'diagnosis_test_type': 'chest_x-ray',
                'diagnosis_test_type_label': "Chest X-ray",
                'is_active': 'yes',
            },
            new_episode.dynamic_case_properties())

        new_test = accessor.get_case(person.tests[0].case_id)
        self.assertDictContainsSubset(
            {
                'is_direct_test_entry': 'no',
                'rft_drtb_diagnosis': 'diagnostic_drtb_test_reason',
                'dataset': 'real',
                'rft_general': 'diagnosis_dstb',
                'rft_dstb_diagnosis': 'diagnostic_test_reason',
                'rft_dstb_followup': 'definitely_not_private_ntm',
                'episode_case_id': 'roland-deschain-occurrence-episode',
                'result_summary_display':
                    "TB Detected\nR: Res\nCount of bacilli: 11\nthat looks infected",
                'drug_resistance_list': 'r',
            },
            new_test.dynamic_case_properties())

        new_referral = accessor.get_case(person.referrals[0].case_id)
        self.assertDictContainsSubset(
            {
                'referral_initiated_date': 'referral_date',
                'referred_to_name': 'referred_to_location_name',
                'referred_by_name': '',
                'referral_rejection_reason_other_detail':
                    'reason_for_refusal_other_detail',
                'referral_rejection_reason': 'reason_for_refusal',
                'referral_closed_date': 'acceptance_refusal_date',
                'accepted_by_name': 'phi',
            },
            new_referral.dynamic_case_properties())
        # Referrals are re-parented under the occurrence by the migration.
        parent = get_parent_of_case(self.domain, new_referral, 'occurrence')
        self.assertEqual(new_occurrence.case_id, parent.case_id)

        new_trail = accessor.get_case(person.trails[0].case_id)
        parent = get_parent_of_case(self.domain, new_trail, 'occurrence')
        self.assertEqual(new_occurrence.case_id, parent.case_id)

        # drtb-hiv referrals are closed and replaced by a secondary_owner case.
        new_drtb_hiv = accessor.get_case(person.drtb_hiv[0].case_id)
        self.assertTrue(new_drtb_hiv.closed)

        secondary_owner_id = accessor.get_case_ids_in_domain(
            type='secondary_owner')[0]
        new_secondary_owner = accessor.get_case(secondary_owner_id)
        self.assertEqual('person_id-drtb-hiv', new_secondary_owner.name)
        self.assertDictContainsSubset({
            'secondary_owner_type': 'drtb-hiv',
        }, new_secondary_owner.dynamic_case_properties())
        self.assertEqual("drtb_hiv_referral_owner", new_secondary_owner.owner_id)
        parent = get_parent_of_case(self.domain, new_secondary_owner, 'occurrence')
        self.assertEqual(new_occurrence.case_id, parent.case_id)
class ExplodeExtensionsDBTest(BaseSyncTest):
    """Exercises explode_cases against a case graph containing both
    child and extension indices."""

    def setUp(self):
        super(ExplodeExtensionsDBTest, self).setUp()
        self.accessor = CaseAccessors(self.project.name)
        self._create_case_structure()

    def tearDown(self):
        delete_all_cases()
        delete_all_xforms()
        super(ExplodeExtensionsDBTest, self).tearDown()

    def _create_case_structure(self):
        """Build a four-case graph: 'parent_host' extends 'host' (the root),
        while 'child' (the only owned case) points at 'parent_host' via a
        child index and 'extension' extends it.
        """
        shared_type = 'case'
        root = CaseStructure(
            case_id='host',
            attrs={'create': True, 'owner_id': '-'},
        )
        middle = CaseStructure(
            case_id='parent_host',
            attrs={'create': True, 'owner_id': '-'},
            indices=[CaseIndex(
                root,
                identifier='host',
                relationship='extension',
                related_type=shared_type,
            )],
        )
        # 'child' and 'extension' occupy interchangeable positions in the
        # hierarchy since both index the same middle case.
        owned_child = CaseStructure(
            case_id='child',
            attrs={'create': True},
            indices=[CaseIndex(
                middle,
                identifier='parent',
                relationship='child',
                related_type=shared_type,
            )],
        )
        ext = CaseStructure(
            case_id='extension',
            attrs={'create': True, 'owner_id': '-'},
            indices=[CaseIndex(
                middle,
                identifier='host',
                relationship='extension',
                related_type=shared_type,
            )],
        )
        self.device.post_changes([owned_child, ext])

    def test_case_graph(self):
        restored = self.device.restore().cases
        self.assertEqual(
            ['host', 'parent_host', 'extension', 'child'],
            topological_sort_cases(restored))

    def test_child_extensions(self):
        # 4 original cases; exploding by 5 yields 5 copies of each.
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))
        explode_cases(self.project.name, self.user_id, 5)
        exploded_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(20, len(exploded_ids))
class TestCreateEnikshayCases(ENikshayLocationStructureMixin, TestCase):
    """Tests the create_enikshay_cases management command, which builds
    person/occurrence/episode/test cases from Nikshay source records."""

    def setUp(self):
        self.domain = "enikshay-test-domain"
        super(TestCreateEnikshayCases, self).setUp()
        # One source patient record; most asserted case properties below are
        # derived from these fields.
        self.patient_detail = PatientDetail.objects.create(
            PregId='MH-ABD-05-16-0001',
            Tbunitcode=1,
            pname='A B C',
            pgender='M',
            page=18,
            poccupation='4',
            paadharno=867386000000,
            paddress='Cambridge MA',
            pmob='5432109876',
            pregdate1=date(2016, 12, 13),
            cname='Secondary name',
            caddress='Secondary address',
            cmob='1234567890',
            dcpulmunory='Y',
            dotname='Bubble Bubbles',
            dotmob='9876543210',
            dotpType=1,
            PHI=2,
            atbtreatment='',
            Ptype=4,
            pcategory=4,
            cvisitedDate1='2016-12-25 00:00:00.000',
            InitiationDate1='2016-12-22 16:06:47.726',
            dotmosignDate1='2016-12-23 00:00:00.000',
        )
        self.outcome = Outcome.objects.create(
            PatientId=self.patient_detail,
            HIVStatus='negative',
            loginDate=datetime(2016, 1, 2),
        )
        # Household.objects.create(
        #     PatientID=patient_detail,
        # )
        # Five followups -> five migrated 'test' cases.
        for i in range(5):
            Followup.objects.create(
                id=(i + 1),
                PatientID=self.patient_detail,
            )
        self.case_accessor = CaseAccessors(self.domain)

    def tearDown(self):
        Outcome.objects.all().delete()
        Followup.objects.all().delete()
        # Household.objects.all().delete()
        PatientDetail.objects.all().delete()
        super(TestCreateEnikshayCases, self).tearDown()

    @run_with_all_backends
    @patch('custom.enikshay.nikshay_datamigration.factory.datetime')
    def test_case_creation(self, mock_datetime):
        # Freeze "now" so the generated occurrence_id is deterministic.
        mock_datetime.utcnow.return_value = datetime(2016, 9, 8, 1, 2, 3, 4123)
        call_command('create_enikshay_cases', self.domain)

        person_case_ids = self.case_accessor.get_case_ids_in_domain(type='person')
        self.assertEqual(1, len(person_case_ids))
        person_case = self.case_accessor.get_case(person_case_ids[0])
        self.assertEqual(
            OrderedDict([
                ('aadhaar_number', '867386000000'),
                ('age', '18'),
                ('age_entered', '18'),
                ('contact_phone_number', '5432109876'),
                ('current_address', 'Cambridge MA'),
                ('current_address_district_choice', self.dto.location_id),
                ('current_address_state_choice', self.sto.location_id),
                ('dob', '1998-07-01'),
                ('dob_known', 'no'),
                ('first_name', 'A B'),
                ('last_name', 'C'),
                ('migration_created_case', 'true'),
                ('nikshay_id', 'MH-ABD-05-16-0001'),
                ('person_id', 'FROM_NIKSHAY_MH-ABD-05-16-0001'),
                ('phi', 'PHI'),
                ('secondary_contact_name_address',
                 'Secondary name, Secondary address'),
                ('secondary_contact_phone_number', '1234567890'),
                ('sex', 'male'),
                ('tu_choice', 'TU'),
            ]),
            person_case.dynamic_case_properties()
        )
        self.assertEqual('MH-ABD-05-16-0001', person_case.external_id)
        self.assertEqual('A B C', person_case.name)
        self.assertEqual(self.phi.location_id, person_case.owner_id)
        # make sure the case is only created/modified by a single form
        self.assertEqual(1, len(person_case.xform_ids))

        occurrence_case_ids = self.case_accessor.get_case_ids_in_domain(type='occurrence')
        self.assertEqual(1, len(occurrence_case_ids))
        occurrence_case = self.case_accessor.get_case(occurrence_case_ids[0])
        self.assertEqual(
            OrderedDict([
                ('current_episode_type', 'confirmed_tb'),
                ('hiv_status', 'negative'),
                ('ihv_date', '2016-12-25'),
                ('initial_home_visit_status', 'completed'),
                ('migration_created_case', 'true'),
                ('occurrence_episode_count', '1'),
                # Derived from the mocked utcnow() above.
                ('occurrence_id', '20160908010203004'),
            ]),
            occurrence_case.dynamic_case_properties()
        )
        self.assertEqual('Occurrence #1', occurrence_case.name)
        self.assertEqual(len(occurrence_case.indices), 1)
        self._assertIndexEqual(
            CommCareCaseIndex(
                identifier='host',
                referenced_type='person',
                referenced_id=person_case.get_id,
                relationship='extension',
            ),
            occurrence_case.indices[0]
        )
        self.assertEqual('-', occurrence_case.owner_id)
        # make sure the case is only created/modified by a single form
        self.assertEqual(1, len(occurrence_case.xform_ids))

        episode_case_ids = self.case_accessor.get_case_ids_in_domain(type='episode')
        self.assertEqual(1, len(episode_case_ids))
        episode_case = self.case_accessor.get_case(episode_case_ids[0])
        self.assertEqual(
            OrderedDict([
                ('date_of_mo_signature', '2016-12-23'),
                ('disease_classification', 'pulmonary'),
                ('dots_99_enabled', 'false'),
                ('episode_pending_registration', 'no'),
                ('episode_type', 'confirmed_tb'),
                ('migration_created_case', 'true'),
                ('occupation', 'physical_mathematical_and_entineering'),
                ('patient_type_choice', 'treatment_after_failure'),
                ('treatment_initiation_date', '2016-12-22'),
                ('treatment_supporter_designation', 'health_worker'),
                ('treatment_supporter_first_name', 'Bubble'),
                ('treatment_supporter_last_name', 'Bubbles'),
                ('treatment_supporter_mobile_number', '9876543210'),
            ]),
            episode_case.dynamic_case_properties()
        )
        self.assertEqual('Episode #1: Confirmed TB (Patient)', episode_case.name)
        self.assertEqual(datetime(2016, 12, 13), episode_case.opened_on)
        self.assertEqual('-', episode_case.owner_id)
        self.assertEqual(len(episode_case.indices), 1)
        self._assertIndexEqual(
            CommCareCaseIndex(
                identifier='host',
                referenced_type='occurrence',
                referenced_id=occurrence_case.get_id,
                relationship='extension',
            ),
            episode_case.indices[0]
        )
        # make sure the case is only created/modified by a single form
        self.assertEqual(1, len(episode_case.xform_ids))

        # One migrated 'test' case per Followup created in setUp.
        test_case_ids = set(self.case_accessor.get_case_ids_in_domain(type='test'))
        self.assertEqual(5, len(test_case_ids))
        test_cases = [
            self.case_accessor.get_case(test_case_id)
            for test_case_id in test_case_ids
        ]
        self.assertItemsEqual(
            [
                test_case.dynamic_case_properties()
                for test_case in test_cases
            ],
            [
                OrderedDict([
                    ('date_tested', ''),
                    ('migration_created_case', 'true'),
                    ('migration_followup_id', str(1)),
                ]),
                OrderedDict([
                    ('date_tested', ''),
                    ('migration_created_case', 'true'),
                    ('migration_followup_id', str(2)),
                ]),
                OrderedDict([
                    ('date_tested', ''),
                    ('migration_created_case', 'true'),
                    ('migration_followup_id', str(3)),
                ]),
                OrderedDict([
                    ('date_tested', ''),
                    ('migration_created_case', 'true'),
                    ('migration_followup_id', str(4)),
                ]),
                OrderedDict([
                    ('date_tested', ''),
                    ('migration_created_case', 'true'),
                    ('migration_followup_id', str(5)),
                ]),
            ]
        )
        for test_case in test_cases:
            self.assertEqual('-', test_case.owner_id)
            self.assertEqual(len(test_case.indices), 1)
            self._assertIndexEqual(
                CommCareCaseIndex(
                    identifier='host',
                    referenced_type='occurrence',
                    referenced_id=occurrence_case.get_id,
                    relationship='extension',
                ),
                test_case.indices[0]
            )

    @run_with_all_backends
    def test_case_update(self):
        """Re-running the command after source-record changes should update
        the existing cases in place rather than create new ones."""
        call_command('create_enikshay_cases', self.domain)

        new_addhaar_number = 867386000001
        self.patient_detail.paadharno = new_addhaar_number
        self.patient_detail.dcpulmunory = 'N'
        self.patient_detail.save()
        self.outcome.HIVStatus = 'positive'
        self.outcome.save()

        call_command('create_enikshay_cases', self.domain)

        person_case_ids = self.case_accessor.get_case_ids_in_domain(type='person')
        self.assertEqual(1, len(person_case_ids))
        person_case = self.case_accessor.get_case(person_case_ids[0])
        self.assertEqual(person_case.dynamic_case_properties()['aadhaar_number'],
                         str(new_addhaar_number))

        occurrence_case_ids = self.case_accessor.get_case_ids_in_domain(type='occurrence')
        self.assertEqual(1, len(occurrence_case_ids))
        occurrence_case = self.case_accessor.get_case(occurrence_case_ids[0])
        self.assertEqual(occurrence_case.dynamic_case_properties()['hiv_status'],
                         'positive')

        episode_case_ids = self.case_accessor.get_case_ids_in_domain(type='episode')
        self.assertEqual(1, len(episode_case_ids))
        episode_case = self.case_accessor.get_case(episode_case_ids[0])
        # dcpulmunory='N' maps to extra_pulmonary.
        self.assertEqual(episode_case.dynamic_case_properties()['disease_classification'],
                         'extra_pulmonary')

    @run_with_all_backends
    def test_location_not_found(self):
        """If the PHI location is missing, the person case is archived with
        diagnostic properties rather than dropped."""
        self.phi.delete()
        call_command('create_enikshay_cases', self.domain)

        person_case_ids = self.case_accessor.get_case_ids_in_domain(type='person')
        self.assertEqual(1, len(person_case_ids))
        person_case = self.case_accessor.get_case(person_case_ids[0])
        self.assertEqual(person_case.owner_id, ARCHIVED_CASE_OWNER_ID)
        self.assertEqual(person_case.dynamic_case_properties()['archive_reason'],
                         'migration_location_not_found')
        self.assertEqual(person_case.dynamic_case_properties()['migration_error'],
                         'location_not_found')
        self.assertEqual(person_case.dynamic_case_properties()['migration_error_details'],
                         'MH-ABD-05-16')

    def _assertIndexEqual(self, index_1, index_2):
        # Field-by-field comparison; CommCareCaseIndex instances from
        # different backends may not compare equal directly.
        self.assertEqual(index_1.identifier, index_2.identifier)
        self.assertEqual(index_1.referenced_type, index_2.referenced_type)
        self.assertEqual(index_1.referenced_id, index_2.referenced_id)
        self.assertEqual(index_1.relationship, index_2.relationship)
def _get_case_ids_by_external_id():
    """Return a mapping of external_id -> case_id for every case in DOMAIN.

    If several cases share an external_id, the last one returned by the
    accessor wins.
    """
    accessor = CaseAccessors(DOMAIN)
    all_ids = accessor.get_case_ids_in_domain()
    mapping = {}
    for case in accessor.get_cases(all_ids):
        mapping[case.external_id] = case.case_id
    return mapping
class Command(BaseCommand): help = """ import payment confirmations of vouchers paid offline """ voucher_id_header = 'id' voucher_update_properties = [ 'status', 'amount', 'paymentDate', 'comments', 'failureDescription', 'paymentMode', 'checkNumber', 'bankName', 'eventType', 'case_type', ] voucher_api_properties = [ 'VoucherID', 'EventOccurDate', 'EventID', 'BeneficiaryUUID', 'BeneficiaryType', 'Location', 'Amount', 'DTOLocation', 'InvestigationType', 'PersonId', 'AgencyId', 'EnikshayApprover', 'EnikshayRole', 'EnikshayApprovalDate', ] def add_arguments(self, parser): parser.add_argument('domain') parser.add_argument('filename') parser.add_argument( '--commit', action='store_true', dest='commit', default=False, ) def handle(self, domain, filename, **options): self.domain = domain self.accessor = CaseAccessors(domain) self.commit = options['commit'] with open(filename) as f: reader = csv.reader(f) headers = reader.next() missing_headers = set(self.voucher_update_properties) - set(headers) if missing_headers: print "Missing the following headers:" for header in missing_headers: print " ", header print "\nAborting." return rows = list(reader) print "Received info on {} vouchers. 
Headers are:".format(len(rows)) for header in headers: print header voucher_updates = [] unrecognized_vouchers = [] voucher_ids_to_update = set() for row in rows: voucher_id = row[headers.index(self.voucher_id_header)] voucher = self.all_vouchers_in_domain.get(voucher_id) if voucher: voucher_ids_to_update.add(voucher_id) voucher_updates.append(VoucherUpdate( voucher=voucher, # This property isn't defined on the model id=voucher.case_id, **{ prop: row[headers.index(prop)] for prop in self.voucher_update_properties } )) else: unrecognized_vouchers.append(row) self.log_voucher_updates(voucher_updates) self.log_unrecognized_vouchers(headers, unrecognized_vouchers) self.log_unmodified_vouchers(voucher_ids_to_update) self.update_vouchers(voucher_updates) self.reconcile_repeat_records(voucher_updates) @property @memoized def all_vouchers_in_domain(self): voucher_ids = self.accessor.get_case_ids_in_domain(CASE_TYPE_VOUCHER) return { voucher.get_case_property(VOUCHER_ID): voucher for voucher in self.accessor.iter_cases(voucher_ids) if voucher.get_case_property('state') in ( 'approved', 'paid', 'rejected', 'expired', 'cancelled', 'canceled') or voucher.get_case_property('voucher_approval_status') in ('approved', 'partially_approved') } def write_csv(self, filename, headers, rows): filename = "voucher_confirmations-{}.csv".format(filename) print "writing {}".format(filename) with open(filename, 'w') as f: writer = csv.writer(f) writer.writerow(headers) writer.writerows(rows) def log_voucher_updates(self, voucher_updates): headers = ['ReadableID'] + self.voucher_api_properties + self.voucher_update_properties def make_row(voucher_update): api_payload = VoucherPayload.create_voucher_payload(voucher_update.voucher) return [voucher_update.voucher.get_case_property(VOUCHER_ID)] + [ api_payload[prop] for prop in self.voucher_api_properties ] + [ voucher_update[prop] for prop in self.voucher_update_properties ] rows = map(make_row, voucher_updates) self.write_csv('updates', headers, 
rows) def log_unrecognized_vouchers(self, headers, unrecognized_vouchers): self.write_csv('unrecognized', headers, unrecognized_vouchers) def log_unmodified_vouchers(self, voucher_ids_to_update): unmodified_vouchers = [ voucher for voucher_id, voucher in self.all_vouchers_in_domain.items() if voucher_id not in voucher_ids_to_update ] headers = ['ReadableID', 'URL', 'state', 'voucher_approval_status'] + self.voucher_api_properties def make_row(voucher): api_payload = VoucherPayload.create_voucher_payload(voucher) return [ voucher.get_case_property(VOUCHER_ID), 'https://enikshay.in/a/enikshay/reports/case_data/{}'.format(voucher.case_id), voucher.get_case_property('state'), voucher.get_case_property('voucher_approval_status'), ] + [ api_payload[prop] for prop in self.voucher_api_properties ] rows = map(make_row, unmodified_vouchers) self.write_csv('updates', headers, rows) def update_vouchers(self, voucher_updates): print "updating voucher cases" for chunk in chunked(with_progress_bar(voucher_updates), 100): updates = [ (update.case_id, update.properties, False) for update in chunk ] if self.commit: bulk_update_cases(self.domain, updates, self.__module__) def reconcile_repeat_records(self, voucher_updates): """ Mark updated records as "succeeded", all others as "cancelled" Delete duplicate records if any exist """ print "Reconciling repeat records" chemist_voucher_repeater_id = 'be435d3f407bfb1016cc89ebbf8146b1' lab_voucher_repeater_id = 'be435d3f407bfb1016cc89ebbfc42a47' already_seen = set() updates_by_voucher_id = {update.id: update for update in voucher_updates} headers = ['record_id', 'voucher_id', 'status'] rows = [] get_db = (lambda: IterDB(RepeatRecord.get_db())) if self.commit else MagicMock with get_db() as iter_db: for repeater_id in [chemist_voucher_repeater_id, lab_voucher_repeater_id]: print "repeater {}".format(repeater_id) records = iter_repeat_records_by_domain(self.domain, repeater_id=repeater_id) record_count = get_repeat_record_count(self.domain, 
repeater_id=repeater_id) for record in with_progress_bar(records, record_count): if record.payload_id in already_seen: status = "deleted" iter_db.delete(record) elif record.payload_id in updates_by_voucher_id: # add successful attempt status = "succeeded" attempt = RepeatRecordAttempt( cancelled=False, datetime=datetime.datetime.utcnow(), failure_reason=None, success_response="Paid offline via import_voucher_confirmations", next_check=None, succeeded=True, ) record.add_attempt(attempt) iter_db.save(record) else: # mark record as canceled record.add_attempt(RepeatRecordAttempt( cancelled=True, datetime=datetime.datetime.utcnow(), failure_reason="Cancelled during import_voucher_confirmations", success_response=None, next_check=None, succeeded=False, )) iter_db.save(record) already_seen.add(record.payload_id) rows.append([record._id, record.payload_id, status]) self.write_csv('repeat_records', headers, rows)
class ExplodeExtensionsDBTest(BaseSyncTest):
    """Tests explode_cases over a graph that mixes child and extension
    indices (see the diagram in _create_case_structure)."""

    def setUp(self):
        super(ExplodeExtensionsDBTest, self).setUp()
        self.accessor = CaseAccessors(self.project.name)
        self._create_case_structure()

    def tearDown(self):
        delete_all_cases()
        delete_all_xforms()
        super(ExplodeExtensionsDBTest, self).tearDown()

    def _create_case_structure(self):
        """
        +----+
        | H  |
        +--^-+
           |e
        +---+      +--+-+
        |C  +--c-->| PH |
        +---+      +--^-+
        (owned)       |e
                   +--+-+
                   | E  |
                   +----+
        """
        case_type = 'case'
        # H has no outgoing indices, so it is the root of the graph.
        H = CaseStructure(
            case_id='host',
            attrs={'create': True, 'owner_id': '-'},
        )  # No outgoing indices, so this is the root
        # PH extends H and sits in the middle of the graph.
        PH = CaseStructure(
            case_id='parent_host',
            attrs={'create': True, 'owner_id': '-'},
            indices=[CaseIndex(
                H,
                identifier='host',
                relationship='extension',
                related_type=case_type,
            )]
        )  # This case is in the middle
        # C is the only owned case (no explicit owner_id override).
        C = CaseStructure(
            case_id='child',
            attrs={'create': True},
            indices=[CaseIndex(
                PH,
                identifier='parent',
                relationship='child',
                related_type=case_type,
            )]
        )
        # C and E are interchangable in their position in the hierarchy since
        # they point at the same case
        E = CaseStructure(
            case_id='extension',
            attrs={'create': True, 'owner_id': '-'},
            indices=[CaseIndex(
                PH,
                identifier='host',
                relationship='extension',
                related_type=case_type,
            )]
        )
        # Posting C and E creates H and PH transitively via the indices.
        self.device.post_changes([C, E])

    def test_case_graph(self):
        # The restore payload must be topologically ordered: hosts/parents
        # before the cases that index them.
        cases = self.device.restore().cases
        self.assertEqual(
            ['host', 'parent_host', 'extension', 'child'],
            topological_sort_cases(cases)
        )

    def test_child_extensions(self):
        # 4 original cases exploded by a factor of 5 -> 20 cases.
        self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))
        explode_cases(self.project.name, self.user_id, 5)
        case_ids = self.accessor.get_case_ids_in_domain()
        self.assertEqual(20, len(case_ids))
class ReprocessSubmissionStubTests(TestCase):
    """Tests reprocess_unfinished_stub: form submissions that failed partway
    through case/ledger processing leave an UnfinishedSubmissionStub behind,
    and reprocessing must complete the work exactly once."""

    @classmethod
    def setUpClass(cls):
        super(ReprocessSubmissionStubTests, cls).setUpClass()
        cls.domain = uuid.uuid4().hex
        cls.product = SQLProduct.objects.create(
            domain=cls.domain, product_id='product1', name='product1')

    @classmethod
    def tearDownClass(cls):
        cls.product.delete()
        super(ReprocessSubmissionStubTests, cls).tearDownClass()

    def setUp(self):
        super(ReprocessSubmissionStubTests, self).setUp()
        self.factory = CaseFactory(domain=self.domain)
        self.formdb = FormAccessors(self.domain)
        self.casedb = CaseAccessors(self.domain)
        self.ledgerdb = LedgerAccessors(self.domain)

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        super(ReprocessSubmissionStubTests, self).tearDown()

    def test_reprocess_unfinished_submission_case_create(self):
        case_id = uuid.uuid4().hex
        with _patch_save_to_raise_error(self):
            self.factory.create_or_update_cases([
                CaseStructure(case_id=case_id, attrs={
                    'case_type': 'parent',
                    'create': True
                })
            ])

        stubs = UnfinishedSubmissionStub.objects.filter(
            domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))

        # form that was saved before case error raised
        normal_form_ids = self.formdb.get_all_form_ids_in_domain('XFormInstance')
        self.assertEqual(0, len(normal_form_ids))

        # shows error form (duplicate of form that was saved before case error)
        # this is saved because the saving was assumed to be atomic so if there
        # was any error it's assumed the form didn't get saved
        # we don't really care about this form in this test
        error_forms = self.formdb.get_forms_by_type('XFormError', 10)
        self.assertEqual(1, len(error_forms))
        self.assertIsNone(error_forms[0].orig_id)
        self.assertEqual(error_forms[0].form_id, stubs[0].xform_id)

        # Fixed: this previously passed self.domain as the case-*type* filter
        # (the accessor is already scoped to the domain), which made the
        # zero-cases check vacuous.
        self.assertEqual(0, len(self.casedb.get_case_ids_in_domain()))

        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))

        case_ids = self.casedb.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        self.assertEqual(case_id, case_ids[0])

        # Stub is deleted once reprocessing succeeds.
        with self.assertRaises(UnfinishedSubmissionStub.DoesNotExist):
            UnfinishedSubmissionStub.objects.get(pk=stubs[0].pk)

    def test_reprocess_unfinished_submission_case_update(self):
        case_id = uuid.uuid4().hex
        form_ids = []
        form_ids.append(
            submit_case_blocks(
                CaseBlock(case_id=case_id, create=True, case_type='box').as_string(),
                self.domain)[0].form_id)

        with _patch_save_to_raise_error(self):
            submit_case_blocks(
                CaseBlock(case_id=case_id, update={'prop': 'a'}).as_string(),
                self.domain)

        stubs = UnfinishedSubmissionStub.objects.filter(
            domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))
        form_ids.append(stubs[0].xform_id)

        # submit second form with case update
        form_ids.append(
            submit_case_blocks(
                CaseBlock(case_id=case_id, update={'prop': 'b'}).as_string(),
                self.domain)[0].form_id)

        case = self.casedb.get_case(case_id)
        self.assertEqual(2, len(case.xform_ids))
        self.assertEqual('b', case.get_case_property('prop'))

        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))
        self.assertEqual(0, len(result.ledgers))

        case = self.casedb.get_case(case_id)
        # should be property value from most recent form
        self.assertEqual('b', case.get_case_property('prop'))
        self.assertEqual(3, len(case.xform_ids))
        # Reprocessed form is inserted in original submission order.
        self.assertEqual(form_ids, case.xform_ids)

        with self.assertRaises(UnfinishedSubmissionStub.DoesNotExist):
            UnfinishedSubmissionStub.objects.get(pk=stubs[0].pk)

    def test_reprocess_unfinished_submission_ledger_create(self):
        from corehq.apps.commtrack.tests.util import get_single_balance_block

        case_id = uuid.uuid4().hex
        self.factory.create_or_update_cases([
            CaseStructure(case_id=case_id, attrs={
                'case_type': 'parent',
                'create': True
            })
        ])

        with _patch_save_to_raise_error(self):
            submit_case_blocks(
                get_single_balance_block(case_id, 'product1', 100), self.domain)

        stubs = UnfinishedSubmissionStub.objects.filter(
            domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))

        # Ledger work was not completed before the error.
        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(0, len(ledgers))

        case = self.casedb.get_case(case_id)
        self.assertEqual(1, len(case.xform_ids))

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        self.assertEqual(0, len(ledger_transactions))

        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))
        self.assertEqual(1, len(result.ledgers))

        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(1, len(ledgers))

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        self.assertEqual(1, len(ledger_transactions))

        # case still only has 2 transactions
        case = self.casedb.get_case(case_id)
        self.assertEqual(2, len(case.xform_ids))
        if should_use_sql_backend(self.domain):
            self.assertTrue(case.actions[1].is_ledger_transaction)

    def test_reprocess_unfinished_submission_ledger_rebuild(self):
        from corehq.apps.commtrack.tests.util import get_single_balance_block

        case_id = uuid.uuid4().hex
        form_ids = []
        form_ids.append(
            submit_case_blocks([
                CaseBlock(case_id=case_id, create=True, case_type='shop').as_string(),
                get_single_balance_block(case_id, 'product1', 100),
            ], self.domain)[0].form_id)

        with _patch_save_to_raise_error(self):
            submit_case_blocks(
                get_single_balance_block(case_id, 'product1', 50), self.domain)

        stubs = UnfinishedSubmissionStub.objects.filter(
            domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))
        form_ids.append(stubs[0].xform_id)

        # submit another form afterwards
        form_ids.append(
            submit_case_blocks(
                get_single_balance_block(case_id, 'product1', 25),
                self.domain)[0].form_id)

        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(1, len(ledgers))
        self.assertEqual(25, ledgers[0].balance)

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        if should_use_sql_backend(self.domain):
            self.assertEqual(2, len(ledger_transactions))
        else:
            # includes extra consumption transaction
            self.assertEqual(3, len(ledger_transactions))

        # should rebuild ledger transactions
        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))
        self.assertEqual(1, len(result.ledgers))

        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(1, len(ledgers))  # still only 1
        self.assertEqual(25, ledgers[0].balance)

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        if should_use_sql_backend(self.domain):
            self.assertEqual(3, len(ledger_transactions))
            # make sure transactions are in correct order
            self.assertEqual(form_ids,
                             [trans.form_id for trans in ledger_transactions])
            self.assertEqual(100, ledger_transactions[0].updated_balance)
            self.assertEqual(100, ledger_transactions[0].delta)
            self.assertEqual(50, ledger_transactions[1].updated_balance)
            self.assertEqual(-50, ledger_transactions[1].delta)
            self.assertEqual(25, ledger_transactions[2].updated_balance)
            self.assertEqual(-25, ledger_transactions[2].delta)
        else:
            self.assertEqual(3, len(ledger_transactions))
            self.assertEqual(
                form_ids,
                [trans.report.form_id for trans in ledger_transactions])
            self.assertEqual(100, ledger_transactions[0].stock_on_hand)
            self.assertEqual(50, ledger_transactions[1].stock_on_hand)
            self.assertEqual(25, ledger_transactions[2].stock_on_hand)

    def test_fire_signals(self):
        from corehq.apps.receiverwrapper.tests.test_submit_errors import failing_signal_handler

        case_id = uuid.uuid4().hex
        form_id = uuid.uuid4().hex
        with failing_signal_handler('signal death'):
            submit_case_blocks(
                CaseBlock(case_id=case_id, create=True, case_type='box').as_string(),
                self.domain,
                form_id=form_id)

        form = self.formdb.get_form(form_id)

        # Resubmitting the same payload must re-fire both signals.
        with catch_signal(successful_form_received) as form_handler, \
                catch_signal(case_post_save) as case_handler:
            submit_form_locally(
                instance=form.get_xml(),
                domain=self.domain,
            )

        case = self.casedb.get_case(case_id)

        if should_use_sql_backend(self.domain):
            self.assertEqual(form, form_handler.call_args[1]['xform'])
            self.assertEqual(case, case_handler.call_args[1]['case'])
        else:
            # Couch models don't compare equal across fetches; compare ids/revs.
            signal_form = form_handler.call_args[1]['xform']
            self.assertEqual(form.form_id, signal_form.form_id)
            self.assertEqual(form.get_rev, signal_form.get_rev)

            signal_case = case_handler.call_args[1]['case']
            self.assertEqual(case.case_id, signal_case.case_id)
            self.assertEqual(case.get_rev, signal_case.get_rev)
class ExplodeCasesDbTest(TestCase):
    """Tests for ``explode_cases``: cloning a seed case N times for a user."""

    @classmethod
    def setUpClass(cls):
        super(ExplodeCasesDbTest, cls).setUpClass()
        delete_all_cases()
        cls.domain = Domain(name='foo')
        cls.domain.save()
        cls.user = CommCareUser.create(cls.domain.name, 'somebody', 'password')
        cls.user_id = cls.user._id

    def setUp(self):
        self.accessor = CaseAccessors(self.domain.name)
        delete_all_cases()
        delete_all_xforms()

    def tearDown(self):
        delete_all_cases()
        delete_all_xforms()

    @classmethod
    def tearDownClass(cls):
        cls.user.delete()
        cls.domain.delete()
        super(ExplodeCasesDbTest, cls).tearDownClass()

    def _case_block_xml(self, case_type, case_id=None, index=None):
        # Build the serialized XML for one case block owned by the test user.
        kwargs = {
            'create': True,
            'case_id': case_id or uuid.uuid4().hex,
            'user_id': self.user_id,
            'owner_id': self.user_id,
            'case_type': case_type,
        }
        if index is not None:
            kwargs['index'] = index
        return CaseBlock(**kwargs).as_string().decode('utf-8')

    @run_with_all_backends
    def test_simple(self):
        """Exploding one case to a factor of 10 yields 10 same-owner cases."""
        submit_case_blocks([self._case_block_xml('exploder-type')], self.domain.name)
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        explode_cases(self.domain.name, self.user_id, 10)

        exploded = list(self.accessor.iter_cases(self.accessor.get_case_ids_in_domain()))
        self.assertEqual(10, len(exploded))
        for exploded_case in exploded:
            self.assertEqual(self.user_id, exploded_case.owner_id)

    @run_with_all_backends
    def test_skip_user_case(self):
        """Cases of type 'commcare-user' are left alone by the exploder."""
        submit_case_blocks([self._case_block_xml('commcare-user')], self.domain.name)
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))

        explode_cases(self.domain.name, self.user_id, 10)

        remaining = list(self.accessor.iter_cases(self.accessor.get_case_ids_in_domain()))
        self.assertEqual(1, len(remaining))
        for remaining_case in remaining:
            self.assertEqual(self.user_id, remaining_case.owner_id)

    @run_with_all_backends
    def test_parent_child(self):
        """Exploding a parent/child pair keeps each child index pointing at a
        distinct parent clone."""
        parent_id = uuid.uuid4().hex
        parent_type = 'exploder-parent-type'
        parent_xml = self._case_block_xml(parent_type, case_id=parent_id)
        child_xml = self._case_block_xml(
            'exploder-child-type',
            case_id=uuid.uuid4().hex,
            index={'parent': (parent_type, parent_id)},
        )
        submit_case_blocks([parent_xml, child_xml], self.domain.name)
        self.assertEqual(2, len(self.accessor.get_case_ids_in_domain()))

        explode_cases(self.domain.name, self.user_id, 5)

        all_cases = list(self.accessor.iter_cases(self.accessor.get_case_ids_in_domain()))
        self.assertEqual(10, len(all_cases))

        parents_by_id = {c.case_id: c for c in all_cases if c.type == parent_type}
        self.assertEqual(5, len(parents_by_id))

        children = [c for c in all_cases if c.type == 'exploder-child-type']
        self.assertEqual(5, len(children))

        # make sure they're different: every child references its own parent clone
        referenced_parent_ids = [c.indices[0].referenced_id for c in children]
        self.assertEqual(len(children), len(set(referenced_parent_ids)))
        for child in children:
            self.assertEqual(1, len(child.indices))
            self.assertTrue(child.indices[0].referenced_id in parents_by_id)
class ExplodeCasesDbTest(TestCase):
    """Tests for ``explode_cases``.

    NOTE(review): this file contains an earlier class with the same name;
    being defined later, this one shadows it at import time — confirm the
    duplication is intentional.
    """

    @classmethod
    def setUpClass(cls):
        super(ExplodeCasesDbTest, cls).setUpClass()
        delete_all_cases()
        cls.domain = Domain(name='foo')
        cls.domain.save()
        cls.user = CommCareUser.create(cls.domain.name, 'somebody', 'password')
        cls.user_id = cls.user._id

    def setUp(self):
        self.accessor = CaseAccessors(self.domain.name)
        delete_all_cases()
        delete_all_xforms()

    def tearDown(self):
        delete_all_cases()
        delete_all_xforms()

    @classmethod
    def tearDownClass(cls):
        cls.user.delete()
        cls.domain.delete()
        super(ExplodeCasesDbTest, cls).tearDownClass()

    @run_with_all_backends
    def test_simple(self):
        """Exploding one case to a factor of 10 yields 10 same-owner cases."""
        caseblock = CaseBlock(
            create=True,
            case_id=uuid.uuid4().hex,
            user_id=self.user_id,
            owner_id=self.user_id,
            case_type='exploder-type',
        ).as_string().decode('utf-8')  # as_string() yields bytes; decode like the sibling classes do
        submit_case_blocks([caseblock], self.domain.name)
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        explode_cases(self.domain.name, self.user_id, 10)

        case_ids = self.accessor.get_case_ids_in_domain()
        cases_back = list(self.accessor.iter_cases(case_ids))
        self.assertEqual(10, len(cases_back))
        for case in cases_back:
            self.assertEqual(self.user_id, case.owner_id)

    @run_with_all_backends
    def test_skip_user_case(self):
        """Cases of type 'commcare-user' are not exploded."""
        caseblock = CaseBlock(
            create=True,
            case_id=uuid.uuid4().hex,
            user_id=self.user_id,
            owner_id=self.user_id,
            case_type='commcare-user',
        ).as_string().decode('utf-8')  # decode bytes -> text for submission
        submit_case_blocks([caseblock], self.domain.name)
        self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
        explode_cases(self.domain.name, self.user_id, 10)

        case_ids = self.accessor.get_case_ids_in_domain()
        cases_back = list(self.accessor.iter_cases(case_ids))
        self.assertEqual(1, len(cases_back))
        for case in cases_back:
            self.assertEqual(self.user_id, case.owner_id)

    @run_with_all_backends
    def test_parent_child(self):
        """Exploding a parent/child pair multiplies both case types."""
        parent_id = uuid.uuid4().hex
        parent_type = 'exploder-parent-type'
        parent_block = CaseBlock(
            create=True,
            case_id=parent_id,
            user_id=self.user_id,
            owner_id=self.user_id,
            case_type=parent_type,
        ).as_string().decode('utf-8')

        child_id = uuid.uuid4().hex
        child_block = CaseBlock(
            create=True,
            case_id=child_id,
            user_id=self.user_id,
            owner_id=self.user_id,
            case_type='exploder-child-type',
            index={
                'parent': (parent_type, parent_id)
            },
        ).as_string().decode('utf-8')

        submit_case_blocks([parent_block, child_block], self.domain.name)
        self.assertEqual(2, len(self.accessor.get_case_ids_in_domain()))

        explode_cases(self.domain.name, self.user_id, 5)
        case_ids = self.accessor.get_case_ids_in_domain()
        cases_back = list(self.accessor.iter_cases(case_ids))
        self.assertEqual(10, len(cases_back))
        # NOTE(review): the other copy of this test goes on to assert the
        # child->parent index wiring; this copy stops here — consider completing it.
        parent_cases = {
            p.case_id: p
            for p in [case for case in cases_back if case.type == parent_type]
        }
class ReprocessSubmissionStubTests(TestCase):
    """Tests for ``reprocess_unfinished_stub``: a form submission that errored
    partway through (case/ledger save patched to raise) leaves an
    ``UnfinishedSubmissionStub`` behind; reprocessing the stub must finish the
    case/ledger work and delete the stub.
    """

    @classmethod
    def setUpClass(cls):
        super(ReprocessSubmissionStubTests, cls).setUpClass()
        cls.domain = uuid.uuid4().hex
        cls.product = SQLProduct.objects.create(domain=cls.domain, product_id='product1', name='product1')

    @classmethod
    def tearDownClass(cls):
        cls.product.delete()
        super(ReprocessSubmissionStubTests, cls).tearDownClass()

    def setUp(self):
        super(ReprocessSubmissionStubTests, self).setUp()
        self.factory = CaseFactory(domain=self.domain)
        # domain-bound accessors: their query methods never take the domain again
        self.formdb = FormAccessors(self.domain)
        self.casedb = CaseAccessors(self.domain)
        self.ledgerdb = LedgerAccessors(self.domain)

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        super(ReprocessSubmissionStubTests, self).tearDown()

    def test_reprocess_unfinished_submission_case_create(self):
        """Reprocessing a stub creates the case the failed submission intended."""
        case_id = uuid.uuid4().hex
        with _patch_save_to_raise_error(self):
            self.factory.create_or_update_cases([
                CaseStructure(case_id=case_id, attrs={'case_type': 'parent', 'create': True})
            ])

        stubs = UnfinishedSubmissionStub.objects.filter(domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))

        # form that was saved before case error raised
        normal_form_ids = self.formdb.get_all_form_ids_in_domain('XFormInstance')
        self.assertEqual(0, len(normal_form_ids))

        # shows error form (duplicate of form that was saved before case error)
        # this is saved because the saving was assumed to be atomic so if there was any error it's assumed
        # the form didn't get saved
        # we don't really care about this form in this test
        error_forms = self.formdb.get_forms_by_type('XFormError', 10)
        self.assertEqual(1, len(error_forms))
        self.assertIsNone(error_forms[0].orig_id)
        self.assertEqual(error_forms[0].form_id, stubs[0].xform_id)

        # BUG FIX: previously called get_case_ids_in_domain(self.domain); the
        # accessor is already domain-bound and the first positional argument is
        # the case *type*, so it filtered by type == domain name and always
        # returned [] — the assertion passed for the wrong reason.
        self.assertEqual(0, len(self.casedb.get_case_ids_in_domain()))

        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))

        case_ids = self.casedb.get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))
        self.assertEqual(case_id, case_ids[0])

        # reprocessing removes the stub
        with self.assertRaises(UnfinishedSubmissionStub.DoesNotExist):
            UnfinishedSubmissionStub.objects.get(pk=stubs[0].pk)

    def test_reprocess_unfinished_submission_case_update(self):
        """Reprocessing a failed mid-sequence update rebuilds the case with the
        forms in their original order; later updates still win."""
        case_id = uuid.uuid4().hex
        form_ids = []
        form_ids.append(submit_case_blocks(
            CaseBlock(case_id=case_id, create=True, case_type='box').as_string().decode('utf-8'), self.domain
        )[0].form_id)

        with _patch_save_to_raise_error(self):
            submit_case_blocks(
                CaseBlock(case_id=case_id, update={'prop': 'a'}).as_string().decode('utf-8'), self.domain
            )

        stubs = UnfinishedSubmissionStub.objects.filter(domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))
        form_ids.append(stubs[0].xform_id)

        # submit second form with case update
        form_ids.append(submit_case_blocks(
            CaseBlock(case_id=case_id, update={'prop': 'b'}).as_string().decode('utf-8'), self.domain
        )[0].form_id)

        case = self.casedb.get_case(case_id)
        self.assertEqual(2, len(case.xform_ids))
        self.assertEqual('b', case.get_case_property('prop'))

        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))
        self.assertEqual(0, len(result.ledgers))

        case = self.casedb.get_case(case_id)
        self.assertEqual('b', case.get_case_property('prop'))  # should be property value from most recent form
        self.assertEqual(3, len(case.xform_ids))
        self.assertEqual(form_ids, case.xform_ids)

        with self.assertRaises(UnfinishedSubmissionStub.DoesNotExist):
            UnfinishedSubmissionStub.objects.get(pk=stubs[0].pk)

    def test_reprocess_unfinished_submission_ledger_create(self):
        """Reprocessing a stub applies the ledger balance the failed
        submission carried, without duplicating case transactions."""
        from corehq.apps.commtrack.tests.util import get_single_balance_block
        case_id = uuid.uuid4().hex
        self.factory.create_or_update_cases([
            CaseStructure(case_id=case_id, attrs={'case_type': 'parent', 'create': True})
        ])

        with _patch_save_to_raise_error(self):
            submit_case_blocks(
                get_single_balance_block(case_id, 'product1', 100), self.domain
            )

        stubs = UnfinishedSubmissionStub.objects.filter(domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))

        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(0, len(ledgers))

        case = self.casedb.get_case(case_id)
        self.assertEqual(1, len(case.xform_ids))

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        self.assertEqual(0, len(ledger_transactions))

        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))
        self.assertEqual(1, len(result.ledgers))

        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(1, len(ledgers))

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        self.assertEqual(1, len(ledger_transactions))

        # case still only has 2 transactions
        case = self.casedb.get_case(case_id)
        self.assertEqual(2, len(case.xform_ids))
        if should_use_sql_backend(self.domain):
            self.assertTrue(case.actions[1].is_ledger_transaction)

    def test_reprocess_unfinished_submission_ledger_rebuild(self):
        """Reprocessing a stub that falls between two successful ledger forms
        rebuilds the transaction history in submission order."""
        from corehq.apps.commtrack.tests.util import get_single_balance_block
        case_id = uuid.uuid4().hex
        form_ids = []
        form_ids.append(submit_case_blocks(
            [
                CaseBlock(case_id=case_id, create=True, case_type='shop').as_string().decode('utf-8'),
                get_single_balance_block(case_id, 'product1', 100),
            ],
            self.domain
        )[0].form_id)

        with _patch_save_to_raise_error(self):
            submit_case_blocks(
                get_single_balance_block(case_id, 'product1', 50), self.domain
            )

        stubs = UnfinishedSubmissionStub.objects.filter(domain=self.domain, saved=False).all()
        self.assertEqual(1, len(stubs))
        form_ids.append(stubs[0].xform_id)

        # submit another form afterwards
        form_ids.append(submit_case_blocks(
            get_single_balance_block(case_id, 'product1', 25), self.domain
        )[0].form_id)

        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(1, len(ledgers))
        self.assertEqual(25, ledgers[0].balance)

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        if should_use_sql_backend(self.domain):
            self.assertEqual(2, len(ledger_transactions))
        else:
            # includes extra consumption transaction
            self.assertEqual(3, len(ledger_transactions))

        # should rebuild ledger transactions
        result = reprocess_unfinished_stub(stubs[0])
        self.assertEqual(1, len(result.cases))
        self.assertEqual(1, len(result.ledgers))

        ledgers = self.ledgerdb.get_ledger_values_for_case(case_id)
        self.assertEqual(1, len(ledgers))  # still only 1
        self.assertEqual(25, ledgers[0].balance)

        ledger_transactions = self.ledgerdb.get_ledger_transactions_for_case(case_id)
        if should_use_sql_backend(self.domain):
            self.assertEqual(3, len(ledger_transactions))
            # make sure transactions are in correct order
            self.assertEqual(form_ids, [trans.form_id for trans in ledger_transactions])
            self.assertEqual(100, ledger_transactions[0].updated_balance)
            self.assertEqual(100, ledger_transactions[0].delta)
            self.assertEqual(50, ledger_transactions[1].updated_balance)
            self.assertEqual(-50, ledger_transactions[1].delta)
            self.assertEqual(25, ledger_transactions[2].updated_balance)
            self.assertEqual(-25, ledger_transactions[2].delta)
        else:
            self.assertEqual(3, len(ledger_transactions))
            self.assertEqual(form_ids, [trans.report.form_id for trans in ledger_transactions])
            self.assertEqual(100, ledger_transactions[0].stock_on_hand)
            self.assertEqual(50, ledger_transactions[1].stock_on_hand)
            self.assertEqual(25, ledger_transactions[2].stock_on_hand)

    def test_fire_signals(self):
        """Resubmitting a form whose signal handler died re-fires the
        form/case signals with the saved form and case."""
        from corehq.apps.receiverwrapper.tests.test_submit_errors import failing_signal_handler
        case_id = uuid.uuid4().hex
        form_id = uuid.uuid4().hex
        with failing_signal_handler('signal death'):
            submit_case_blocks(
                CaseBlock(case_id=case_id, create=True, case_type='box').as_string().decode('utf-8'),
                self.domain, form_id=form_id
            )

        form = self.formdb.get_form(form_id)

        with catch_signal(successful_form_received) as form_handler, catch_signal(case_post_save) as case_handler:
            submit_form_locally(
                instance=form.get_xml(),
                domain=self.domain,
            )

        case = self.casedb.get_case(case_id)

        if should_use_sql_backend(self.domain):
            self.assertEqual(form, form_handler.call_args[1]['xform'])
            self.assertEqual(case, case_handler.call_args[1]['case'])
        else:
            # couch docs get a new _rev on save, so compare ids/revs piecewise
            signal_form = form_handler.call_args[1]['xform']
            self.assertEqual(form.form_id, signal_form.form_id)
            self.assertEqual(form.get_rev, signal_form.get_rev)

            signal_case = case_handler.call_args[1]['case']
            self.assertEqual(case.case_id, signal_case.case_id)
            self.assertEqual(case.get_rev, signal_case.get_rev)
class TestCreateEnikshayCases(ENikshayLocationStructureMixin, TestCase):
    """Tests for the ``create_enikshay_cases`` management command, which
    migrates Nikshay ``PatientDetail``/``Outcome`` rows into
    person/occurrence/episode cases."""

    def setUp(self):
        self.domain = "enikshay-test-domain"
        super(TestCreateEnikshayCases, self).setUp()
        # One source patient row; the field values below are what the
        # migrated case properties are asserted against.
        self.patient_detail = PatientDetail.objects.create(
            PregId='MH-ABD-05-16-0001',
            Tbunitcode=1,
            pname='A B C',
            pgender='M',
            page=18,
            poccupation='4',
            paadharno=867386000000,
            paddress='Cambridge MA',
            pmob='5432109876',
            pregdate1=date(2016, 12, 13),
            cname='Secondary name',
            caddress='Secondary address',
            cmob='1234567890',
            dcpulmunory='N',
            dcexpulmunory='3',
            dotname='Bubble Bubbles',
            dotmob='9876543210',
            dotpType=1,
            PHI=2,
            atbtreatment='',
            Ptype=4,
            pcategory=4,
            cvisitedDate1='2016-12-25 00:00:00.000',
            InitiationDate1='2016-12-22 16:06:47.726',
            dotmosignDate1='2016-12-23 00:00:00.000',
        )
        self.outcome = Outcome.objects.create(
            PatientId=self.patient_detail,
            HIVStatus='Neg',
            loginDate=datetime(2016, 1, 2),
        )
        # Household.objects.create(
        #     PatientID=patient_detail,
        # )
        self.case_accessor = CaseAccessors(self.domain)

    def tearDown(self):
        # Delete children before parents (Outcome FKs PatientDetail).
        Outcome.objects.all().delete()
        # Household.objects.all().delete()
        PatientDetail.objects.all().delete()
        super(TestCreateEnikshayCases, self).tearDown()

    @run_with_all_backends
    @patch('custom.enikshay.nikshay_datamigration.factory.datetime')
    def test_case_creation(self, mock_datetime):
        """One command run creates exactly one person, occurrence and episode
        case, each with the expected properties, ownership and indices."""
        # Pin "now" so the generated occurrence_id is deterministic.
        mock_datetime.utcnow.return_value = datetime(2016, 9, 8, 1, 2, 3, 4123)
        call_command('create_enikshay_cases', self.domain)

        person_case_ids = self.case_accessor.get_case_ids_in_domain(type='person')
        self.assertEqual(1, len(person_case_ids))
        person_case = self.case_accessor.get_case(person_case_ids[0])
        self.assertEqual(
            OrderedDict([
                ('aadhaar_number', '867386000000'),
                ('age', '18'),
                ('age_entered', '18'),
                ('contact_phone_number', '5432109876'),
                ('current_address', 'Cambridge MA'),
                ('current_address_district_choice', self.dto.location_id),
                ('current_address_state_choice', self.sto.location_id),
                # dob is back-computed from age, anchored at July 1st
                ('dob', '{}-07-01'.format(datetime.utcnow().year - 18)),
                ('dob_known', 'no'),
                ('first_name', 'A B'),
                ('hiv_status', 'non_reactive'),
                ('last_name', 'C'),
                ('migration_created_case', 'true'),
                ('person_id', 'N-MH-ABD-05-16-0001'),
                ('phi', 'PHI'),
                ('secondary_contact_name_address', 'Secondary name, Secondary address'),
                ('secondary_contact_phone_number', '1234567890'),
                ('sex', 'male'),
                ('tu_choice', 'TU'),
            ]),
            person_case.dynamic_case_properties()
        )
        self.assertEqual('MH-ABD-05-16-0001', person_case.external_id)
        self.assertEqual('A B C', person_case.name)
        self.assertEqual(self.phi.location_id, person_case.owner_id)
        # make sure the case is only created/modified by a single form
        self.assertEqual(1, len(person_case.xform_ids))

        occurrence_case_ids = self.case_accessor.get_case_ids_in_domain(type='occurrence')
        self.assertEqual(1, len(occurrence_case_ids))
        occurrence_case = self.case_accessor.get_case(occurrence_case_ids[0])
        self.assertEqual(
            OrderedDict([
                ('current_episode_type', 'confirmed_tb'),
                ('ihv_date', '2016-12-25'),
                ('initial_home_visit_status', 'completed'),
                ('migration_created_case', 'true'),
                ('occurrence_episode_count', '1'),
                # derived from the mocked utcnow above
                ('occurrence_id', '20160908010203004'),
            ]),
            occurrence_case.dynamic_case_properties()
        )
        self.assertEqual('Occurrence #1', occurrence_case.name)
        self.assertEqual(len(occurrence_case.indices), 1)
        # occurrence is an extension of the person case
        self._assertIndexEqual(
            CommCareCaseIndex(
                identifier='host',
                referenced_type='person',
                referenced_id=person_case.get_id,
                relationship='extension',
            ),
            occurrence_case.indices[0]
        )
        self.assertEqual('-', occurrence_case.owner_id)
        # make sure the case is only created/modified by a single form
        self.assertEqual(1, len(occurrence_case.xform_ids))

        episode_case_ids = self.case_accessor.get_case_ids_in_domain(type='episode')
        self.assertEqual(1, len(episode_case_ids))
        episode_case = self.case_accessor.get_case(episode_case_ids[0])
        self.assertEqual(
            OrderedDict([
                ('adherence_schedule_date_start', '2016-12-22'),
                ('date_of_diagnosis', '2016-12-13'),
                ('date_of_mo_signature', '2016-12-23'),
                ('disease_classification', 'extra_pulmonary'),
                ('dots_99_enabled', 'false'),
                ('episode_pending_registration', 'no'),
                ('episode_type', 'confirmed_tb'),
                ('migration_created_case', 'true'),
                ('nikshay_id', 'MH-ABD-05-16-0001'),
                ('occupation', 'physical_mathematical_and_engineering'),
                ('patient_type_choice', 'treatment_after_lfu'),
                ('site_choice', 'abdominal'),
                ('treatment_initiated', 'yes_phi'),
                ('treatment_initiation_date', '2016-12-22'),
                ('treatment_supporter_designation', 'health_worker'),
                ('treatment_supporter_first_name', 'Bubble'),
                ('treatment_supporter_last_name', 'Bubbles'),
                ('treatment_supporter_mobile_number', '9876543210'),
            ]),
            episode_case.dynamic_case_properties()
        )
        self.assertEqual('Episode #1: Confirmed TB (Patient)', episode_case.name)
        self.assertEqual(datetime(2016, 12, 13), episode_case.opened_on)
        self.assertEqual('-', episode_case.owner_id)
        self.assertEqual(len(episode_case.indices), 1)
        # episode is an extension of the occurrence case
        self._assertIndexEqual(
            CommCareCaseIndex(
                identifier='host',
                referenced_type='occurrence',
                referenced_id=occurrence_case.get_id,
                relationship='extension',
            ),
            episode_case.indices[0]
        )
        # make sure the case is only created/modified by a single form
        self.assertEqual(1, len(episode_case.xform_ids))

    @run_with_all_backends
    def test_case_update(self):
        """Re-running the command after the source rows change updates the
        existing cases instead of creating new ones."""
        call_command('create_enikshay_cases', self.domain)

        new_addhaar_number = 867386000001
        self.patient_detail.paadharno = new_addhaar_number
        self.patient_detail.cvisitedDate1 = '2016-12-31 00:00:00.000'
        self.patient_detail.dcpulmunory = 'N'
        self.patient_detail.save()
        self.outcome.HIVStatus = 'Pos'
        self.outcome.save()

        call_command('create_enikshay_cases', self.domain)

        # still one case per type — updated, not duplicated
        person_case_ids = self.case_accessor.get_case_ids_in_domain(type='person')
        self.assertEqual(1, len(person_case_ids))
        person_case = self.case_accessor.get_case(person_case_ids[0])
        self.assertEqual(
            person_case.dynamic_case_properties()['aadhaar_number'],
            str(new_addhaar_number)
        )
        self.assertEqual(person_case.dynamic_case_properties()['hiv_status'], 'reactive')

        occurrence_case_ids = self.case_accessor.get_case_ids_in_domain(type='occurrence')
        self.assertEqual(1, len(occurrence_case_ids))
        occurrence_case = self.case_accessor.get_case(occurrence_case_ids[0])
        self.assertEqual(occurrence_case.dynamic_case_properties()['ihv_date'], '2016-12-31')

        episode_case_ids = self.case_accessor.get_case_ids_in_domain(type='episode')
        self.assertEqual(1, len(episode_case_ids))
        episode_case = self.case_accessor.get_case(episode_case_ids[0])
        self.assertEqual(
            episode_case.dynamic_case_properties()['disease_classification'],
            'extra_pulmonary'
        )

    @run_with_all_backends
    def test_location_not_found(self):
        """If the patient's PHI location is missing, the person case is
        archived with error details rather than dropped."""
        self.phi.delete()
        call_command('create_enikshay_cases', self.domain)

        person_case_ids = self.case_accessor.get_case_ids_in_domain(type='person')
        self.assertEqual(1, len(person_case_ids))
        person_case = self.case_accessor.get_case(person_case_ids[0])
        self.assertEqual(person_case.owner_id, ARCHIVED_CASE_OWNER_ID)
        self.assertEqual(person_case.dynamic_case_properties()['archive_reason'], 'migration_location_not_found')
        self.assertEqual(person_case.dynamic_case_properties()['migration_error'], 'location_not_found')
        self.assertEqual(person_case.dynamic_case_properties()['migration_error_details'], 'MH-ABD-05-16')

    def _assertIndexEqual(self, index_1, index_2):
        # Field-by-field index comparison (index objects here are compared by
        # their four identifying attributes, not by object equality).
        self.assertEqual(index_1.identifier, index_2.identifier)
        self.assertEqual(index_1.referenced_type, index_2.referenced_type)
        self.assertEqual(index_1.referenced_id, index_2.referenced_id)
        self.assertEqual(index_1.relationship, index_2.relationship)
class ImporterTest(TestCase): def setUp(self): super(ImporterTest, self).setUp() self.domain_obj = create_domain("importer-test") self.domain = self.domain_obj.name self.default_case_type = 'importer-test-casetype' self.couch_user = WebUser.create(None, "test", "foobar") self.couch_user.add_domain_membership(self.domain, is_admin=True) self.couch_user.save() self.accessor = CaseAccessors(self.domain) self.factory = CaseFactory(domain=self.domain, case_defaults={ 'case_type': self.default_case_type, }) delete_all_cases() def tearDown(self): self.couch_user.delete() self.domain_obj.delete() super(ImporterTest, self).tearDown() def _config(self, col_names, search_column=None, case_type=None, search_field='case_id', create_new_cases=True): return ImporterConfig( couch_user_id=self.couch_user._id, case_type=case_type or self.default_case_type, excel_fields=col_names, case_fields=[''] * len(col_names), custom_fields=col_names, search_column=search_column or col_names[0], search_field=search_field, create_new_cases=create_new_cases, ) @run_with_all_backends @patch('corehq.apps.case_importer.tasks.bulk_import_async.update_state') def testImportNone(self, update_state): res = bulk_import_async.delay(self._config(['anything']), self.domain, None) self.assertIsInstance(res.result, Ignore) update_state.assert_called_with( state=states.FAILURE, meta={'errors': 'Sorry, your session has expired. 
Please start over and try again.'}) self.assertEqual(0, len(get_case_ids_in_domain(self.domain))) @run_with_all_backends def testImportBasic(self): config = self._config(['case_id', 'age', 'sex', 'location']) file = make_worksheet_wrapper( ['case_id', 'age', 'sex', 'location'], ['case_id-0', 'age-0', 'sex-0', 'location-0'], ['case_id-1', 'age-1', 'sex-1', 'location-1'], ['case_id-2', 'age-2', 'sex-2', 'location-2'], ['case_id-3', 'age-3', 'sex-3', 'location-3'], ['case_id-4', 'age-4', 'sex-4', 'location-4'], ) res = do_import(file, config, self.domain) self.assertEqual(5, res['created_count']) self.assertEqual(0, res['match_count']) self.assertFalse(res['errors']) self.assertEqual(1, res['num_chunks']) case_ids = self.accessor.get_case_ids_in_domain() cases = list(self.accessor.get_cases(case_ids)) self.assertEqual(5, len(cases)) properties_seen = set() for case in cases: self.assertEqual(self.couch_user._id, case.user_id) self.assertEqual(self.couch_user._id, case.owner_id) self.assertEqual(self.default_case_type, case.type) for prop in ['age', 'sex', 'location']: self.assertTrue(prop in case.get_case_property(prop)) self.assertFalse(case.get_case_property(prop) in properties_seen) properties_seen.add(case.get_case_property(prop)) @run_with_all_backends def testImportNamedColumns(self): config = self._config(['case_id', 'age', 'sex', 'location']) file = make_worksheet_wrapper( ['case_id', 'age', 'sex', 'location'], ['case_id-0', 'age-0', 'sex-0', 'location-0'], ['case_id-1', 'age-1', 'sex-1', 'location-1'], ['case_id-2', 'age-2', 'sex-2', 'location-2'], ['case_id-3', 'age-3', 'sex-3', 'location-3'], ) res = do_import(file, config, self.domain) self.assertEqual(4, res['created_count']) self.assertEqual(4, len(self.accessor.get_case_ids_in_domain())) @run_with_all_backends def testImportTrailingWhitespace(self): cols = ['case_id', 'age', 'sex\xa0', 'location'] config = self._config(cols) file = make_worksheet_wrapper( ['case_id', 'age', 'sex\xa0', 'location'], 
['case_id-0', 'age-0', 'sex\xa0-0', 'location-0'], ) res = do_import(file, config, self.domain) self.assertEqual(1, res['created_count']) case_ids = self.accessor.get_case_ids_in_domain() self.assertEqual(1, len(case_ids)) case = self.accessor.get_case(case_ids[0]) self.assertTrue(bool(case.get_case_property('sex'))) # make sure the value also got properly set @run_with_all_backends def testCaseIdMatching(self): # bootstrap a stub case [case] = self.factory.create_or_update_case(CaseStructure(attrs={ 'create': True, 'update': {'importer_test_prop': 'foo'}, })) self.assertEqual(1, len(self.accessor.get_case_ids_in_domain())) config = self._config(['case_id', 'age', 'sex', 'location']) file = make_worksheet_wrapper( ['case_id', 'age', 'sex', 'location'], [case.case_id, 'age-0', 'sex-0', 'location-0'], [case.case_id, 'age-1', 'sex-1', 'location-1'], [case.case_id, 'age-2', 'sex-2', 'location-2'], ) res = do_import(file, config, self.domain) self.assertEqual(0, res['created_count']) self.assertEqual(3, res['match_count']) self.assertFalse(res['errors']) # shouldn't create any more cases, just the one case_ids = self.accessor.get_case_ids_in_domain() self.assertEqual(1, len(case_ids)) [case] = self.accessor.get_cases(case_ids) for prop in ['age', 'sex', 'location']: self.assertTrue(prop in case.get_case_property(prop)) # shouldn't touch existing properties self.assertEqual('foo', case.get_case_property('importer_test_prop')) @run_with_all_backends def testCaseLookupTypeCheck(self): [case] = self.factory.create_or_update_case(CaseStructure(attrs={ 'create': True, 'case_type': 'nonmatch-type', })) self.assertEqual(1, len(self.accessor.get_case_ids_in_domain())) config = self._config(['case_id', 'age', 'sex', 'location']) file = make_worksheet_wrapper( ['case_id', 'age', 'sex', 'location'], [case.case_id, 'age-0', 'sex-0', 'location-0'], [case.case_id, 'age-1', 'sex-1', 'location-1'], [case.case_id, 'age-2', 'sex-2', 'location-2'], ) res = do_import(file, config, 
self.domain) # because the type is wrong these shouldn't match self.assertEqual(3, res['created_count']) self.assertEqual(0, res['match_count']) self.assertEqual(4, len(self.accessor.get_case_ids_in_domain())) @run_with_all_backends def testCaseLookupDomainCheck(self): self.factory.domain = 'wrong-domain' [case] = self.factory.create_or_update_case(CaseStructure(attrs={ 'create': True, })) self.assertEqual(0, len(self.accessor.get_case_ids_in_domain())) config = self._config(['case_id', 'age', 'sex', 'location']) file = make_worksheet_wrapper( ['case_id', 'age', 'sex', 'location'], [case.case_id, 'age-0', 'sex-0', 'location-0'], [case.case_id, 'age-1', 'sex-1', 'location-1'], [case.case_id, 'age-2', 'sex-2', 'location-2'], ) res = do_import(file, config, self.domain) # because the domain is wrong these shouldn't match self.assertEqual(3, res['created_count']) self.assertEqual(0, res['match_count']) self.assertEqual(3, len(self.accessor.get_case_ids_in_domain())) @run_with_all_backends def testExternalIdMatching(self): # bootstrap a stub case external_id = 'importer-test-external-id' [case] = self.factory.create_or_update_case(CaseStructure( attrs={ 'create': True, 'external_id': external_id, } )) self.assertEqual(1, len(self.accessor.get_case_ids_in_domain())) headers = ['external_id', 'age', 'sex', 'location'] config = self._config(headers, search_field='external_id') file = make_worksheet_wrapper( ['external_id', 'age', 'sex', 'location'], ['importer-test-external-id', 'age-0', 'sex-0', 'location-0'], ['importer-test-external-id', 'age-1', 'sex-1', 'location-1'], ['importer-test-external-id', 'age-2', 'sex-2', 'location-2'], ) res = do_import(file, config, self.domain) self.assertEqual(0, res['created_count']) self.assertEqual(3, res['match_count']) self.assertFalse(res['errors']) # shouldn't create any more cases, just the one self.assertEqual(1, len(self.accessor.get_case_ids_in_domain())) @run_with_all_backends def 
# NOTE(review): the opening "def" of this first method sits on an earlier line
# of the original file; only the remainder of the definition is visible here.
test_external_id_matching_on_create_with_custom_column_name(self):
    """A custom-named search column mapped to external_id should create one
    case for the first row and match the second row onto the same case."""
    headers = ['id_column', 'age', 'sex', 'location']
    external_id = 'external-id-test'
    # search_column names the spreadsheet column; search_field says its
    # values are external_ids rather than case_ids
    config = self._config(headers[1:], search_column='id_column',
                          search_field='external_id')
    file = make_worksheet_wrapper(
        ['id_column', 'age', 'sex', 'location'],
        ['external-id-test', 'age-0', 'sex-0', 'location-0'],
        ['external-id-test', 'age-1', 'sex-1', 'location-1'],
    )
    res = do_import(file, config, self.domain)
    self.assertFalse(res['errors'])
    self.assertEqual(1, res['created_count'])
    self.assertEqual(1, res['match_count'])
    # both rows share the external id, so exactly one case should exist
    case_ids = self.accessor.get_case_ids_in_domain()
    self.assertEqual(1, len(case_ids))
    case = self.accessor.get_case(case_ids[0])
    self.assertEqual(external_id, case.external_id)

def testNoCreateNew(self):
    """With create_new_cases=False and no matching cases, the import is a no-op."""
    config = self._config(['case_id', 'age', 'sex', 'location'],
                          create_new_cases=False)
    file = make_worksheet_wrapper(
        ['case_id', 'age', 'sex', 'location'],
        ['case_id-0', 'age-0', 'sex-0', 'location-0'],
        ['case_id-1', 'age-1', 'sex-1', 'location-1'],
        ['case_id-2', 'age-2', 'sex-2', 'location-2'],
        ['case_id-3', 'age-3', 'sex-3', 'location-3'],
        ['case_id-4', 'age-4', 'sex-4', 'location-4'],
    )
    res = do_import(file, config, self.domain)
    # no matching and no create new set - should do nothing
    self.assertEqual(0, res['created_count'])
    self.assertEqual(0, res['match_count'])
    self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

def testBlankRows(self):
    # don't create new cases for rows left blank
    config = self._config(['case_id', 'age', 'sex', 'location'],
                          create_new_cases=True)
    file = make_worksheet_wrapper(
        ['case_id', 'age', 'sex', 'location'],
        [None, None, None, None],
        ['', '', '', ''],
    )
    res = do_import(file, config, self.domain)
    # entirely-blank rows are skipped - nothing created or matched even
    # though create_new_cases is True
    self.assertEqual(0, res['created_count'])
    self.assertEqual(0, res['match_count'])
    self.assertEqual(0, len(get_case_ids_in_domain(self.domain)))

def testBasicChunking(self):
    """chunksize controls how many rows are processed per chunk."""
    config = self._config(['case_id', 'age', 'sex', 'location'])
    file = make_worksheet_wrapper(
        ['case_id', 'age', 'sex', 'location'],
        ['case_id-0', 'age-0', 'sex-0', 'location-0'],
        ['case_id-1', 'age-1', 'sex-1', 'location-1'],
        ['case_id-2', 'age-2', 'sex-2', 'location-2'],
        ['case_id-3', 'age-3', 'sex-3', 'location-3'],
        ['case_id-4', 'age-4', 'sex-4', 'location-4'],
    )
    res = do_import(file, config, self.domain, chunksize=2)
    # 5 cases in chunks of 2 = 3 chunks
    self.assertEqual(3, res['num_chunks'])
    self.assertEqual(5, res['created_count'])
    self.assertEqual(5, len(get_case_ids_in_domain(self.domain)))

@run_with_all_backends
def testExternalIdChunking(self):
    """Rows that repeat one external id create a single case that the later
    rows update, and the external-id lookup adds an extra chunk."""
    # bootstrap a stub case
    external_id = 'importer-test-external-id'
    headers = ['external_id', 'age', 'sex', 'location']
    config = self._config(headers, search_field='external_id')
    file = make_worksheet_wrapper(
        ['external_id', 'age', 'sex', 'location'],
        ['importer-test-external-id', 'age-0', 'sex-0', 'location-0'],
        ['importer-test-external-id', 'age-1', 'sex-1', 'location-1'],
        ['importer-test-external-id', 'age-2', 'sex-2', 'location-2'],
    )
    # the first one should create the case, and the remaining two should update it
    res = do_import(file, config, self.domain)
    self.assertEqual(1, res['created_count'])
    self.assertEqual(2, res['match_count'])
    self.assertFalse(res['errors'])
    self.assertEqual(2, res['num_chunks'])  # the lookup causes an extra chunk
    # should just create the one case
    case_ids = self.accessor.get_case_ids_in_domain()
    self.assertEqual(1, len(case_ids))
    [case] = self.accessor.get_cases(case_ids)
    self.assertEqual(external_id, case.external_id)
    for prop in ['age', 'sex', 'location']:
        # each imported value (e.g. 'age-2') contains its property name, so a
        # substring check proves the property was written at all
        self.assertTrue(prop in case.get_case_property(prop))

@run_with_all_backends
def testParentCase(self):
    """Rows with a valid parent_id import successfully; rows pointing at a
    nonexistent parent are all reported under InvalidParentId."""
    headers = ['parent_id', 'name', 'case_id']
    config = self._config(headers, create_new_cases=True,
                          search_column='case_id')
    rows = 3
    [parent_case] = self.factory.create_or_update_case(
        CaseStructure(attrs={'create': True}))
    # sanity check: only the parent case exists before the import
    self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
    file = make_worksheet_wrapper(
        ['parent_id', 'name', 'case_id'],
        [parent_case.case_id, 'name-0', 'case_id-0'],
        [parent_case.case_id, 'name-1', 'case_id-1'],
        [parent_case.case_id, 'name-2', 'case_id-2'],
    )
    file_missing = make_worksheet_wrapper(
        ['parent_id', 'name', 'case_id'],
        ['parent_id-0', 'name-0', 'case_id-0'],
        ['parent_id-1', 'name-1', 'case_id-1'],
        ['parent_id-2', 'name-2', 'case_id-2'],
    )
    # Should successfully match on `rows` cases
    res = do_import(file, config, self.domain)
    self.assertEqual(rows, res['created_count'])
    # Should be unable to find parent case on `rows` cases
    res = do_import(file_missing, config, self.domain)
    error_column_name = 'parent_id'
    self.assertEqual(
        rows,
        len(res['errors'][ImportErrors.InvalidParentId][error_column_name]['rows']),
        "All cases should have missing parent")

def import_mock_file(self, rows):
    """Build a config/worksheet from ``rows`` (first row = headers) and run
    the import against ``self.domain``."""
    config = self._config(rows[0])
    xls_file = make_worksheet_wrapper(*rows)
    return do_import(xls_file, config, self.domain)

@run_with_all_backends
def testLocationOwner(self):
    # This is actually testing several different things, but I figure it's
    # worth it, as each of these tests takes a non-trivial amount of time.
    non_case_sharing = LocationType.objects.create(
        domain=self.domain, name='lt1', shares_cases=False
    )
    case_sharing = LocationType.objects.create(
        domain=self.domain, name='lt2', shares_cases=True
    )
    location = make_loc('loc-1', 'Loc 1', self.domain, case_sharing.code)
    make_loc('loc-2', 'Loc 2', self.domain, case_sharing.code)
    # shares its display name with loc-2, making a name lookup ambiguous
    duplicate_loc = make_loc('loc-3', 'Loc 2', self.domain, case_sharing.code)
    # its location type does not share cases, so it cannot own a case
    improper_loc = make_loc('loc-4', 'Loc 4', self.domain,
                            non_case_sharing.code)
    res = self.import_mock_file([
        ['case_id', 'name', 'owner_id', 'owner_name'],
        ['', 'location-owner-id', location.group_id, ''],
        ['', 'location-owner-code', '', location.site_code],
        ['', 'location-owner-name', '', location.name],
        ['', 'duplicate-location-name', '', duplicate_loc.name],
        ['', 'non-case-owning-name', '', improper_loc.name],
    ])
    case_ids = self.accessor.get_case_ids_in_domain()
    cases = {c.name: c for c in list(self.accessor.get_cases(case_ids))}
    # owner resolved by id, by site code, and by unique name all land on `location`
    self.assertEqual(cases['location-owner-id'].owner_id, location.group_id)
    self.assertEqual(cases['location-owner-code'].owner_id, location.group_id)
    self.assertEqual(cases['location-owner-name'].owner_id, location.group_id)
    # row 5 (duplicate name) and row 6 (non-case-sharing owner) must error
    error_message = ImportErrors.DuplicateLocationName
    error_column_name = None
    self.assertIn(error_message, res['errors'])
    self.assertEqual(res['errors'][error_message][error_column_name]['rows'], [5])
    error_message = ImportErrors.InvalidOwnerId
    self.assertIn(error_message, res['errors'])
    error_column_name = 'owner_id'
    self.assertEqual(res['errors'][error_message][error_column_name]['rows'], [6])

@run_with_all_backends
def test_opened_on(self):
    """With the BULK_UPLOAD_DATE_OPENED flag on, a date_opened column
    overrides the case's opened_on."""
    case = self.factory.create_case()
    new_date = '2015-04-30T14:41:53.000000Z'
    with flag_enabled('BULK_UPLOAD_DATE_OPENED'):
        self.import_mock_file([
            ['case_id', 'date_opened'],
            [case.case_id, new_date]
        ])
    case = CaseAccessors(self.domain).get_case(case.case_id)
    # NOTE(review): PhoneTime(...).done() presumably converts the uploaded
    # value from phone-local time - confirm against PhoneTime's definition
    self.assertEqual(case.opened_on,
                     PhoneTime(parse_datetime(new_date)).done())

@run_with_all_backends
def test_columns_and_rows_align(self):
    """A blank header column must not shift values in the later columns."""
    case_owner = CommCareUser.create(self.domain, 'username', 'pw')
    res = self.import_mock_file([
        ['case_id', 'name', '', 'favorite_color', 'owner_id'],
        ['', 'Jeff', '', 'blue', case_owner._id],
        ['', 'Caroline', '', 'yellow', case_owner._id],
    ])
    self.assertEqual(res['errors'], {})
    case_ids = self.accessor.get_case_ids_in_domain()
    cases = {c.name: c for c in list(self.accessor.get_cases(case_ids))}
    # each case keeps its own color/owner - columns did not shift
    self.assertEqual(cases['Jeff'].owner_id, case_owner._id)
    self.assertEqual(cases['Jeff'].get_case_property('favorite_color'), 'blue')
    self.assertEqual(cases['Caroline'].owner_id, case_owner._id)
    self.assertEqual(cases['Caroline'].get_case_property('favorite_color'), 'yellow')
class DeactivatedMobileWorkersTest(BaseCaseRuleTest):
    """Tests the checkin-case rule helpers: ``associated_user_cases_closed``
    and ``close_cases_assigned_to_checkin``, backed by the case_search
    Elasticsearch index."""

    def setUp(self):
        super().setUp()
        delete_all_users()
        self.domain_obj = create_domain(self.domain)
        enable_usercase(self.domain)
        # Elasticsearch is required for the case_search lookups below.
        with trap_extra_setup(ConnectionError):
            self.es = get_es_new()
            initialize_index_and_mapping(self.es, CASE_SEARCH_INDEX_INFO)
        self.mobile_worker = CommCareUser.create(
            self.domain,
            normalize_username("mobile_worker_1", self.domain),
            "123",
            None,
            None,
        )
        sync_user_cases(self.mobile_worker)
        # The checkin case every test revolves around, owned by the worker.
        self.checkin_case = CaseFactory(self.domain).create_case(
            case_type="checkin",
            owner_id=self.mobile_worker.get_id,
            update={"username": self.mobile_worker.raw_username},
        )
        self._index_for_case_search(self.checkin_case)
        self.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
        self.case_accessor = CaseAccessors(self.domain)

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases()
        delete_all_users()
        ensure_index_deleted(CASE_SEARCH_INDEX_INFO.index)
        super().tearDown()

    def _index_for_case_search(self, case):
        # Push the case document into the case_search index; callers are
        # responsible for refreshing the index before querying.
        send_to_elasticsearch(
            "case_search",
            transform_case_for_elasticsearch(case.to_json()))

    def test_associated_usercase_closed(self):
        """Once every usercase is closed (and re-indexed), the predicate
        reports True for the checkin case."""
        for usercase_id in self.case_accessor.get_case_ids_in_domain(
                type=USERCASE_TYPE):
            CaseFactory(self.domain).close_case(usercase_id)
            self._index_for_case_search(
                self.case_accessor.get_case(usercase_id))
        self.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
        self.assertTrue(associated_user_cases_closed(self.checkin_case, None))

    def test_custom_action(self):
        """``close_cases_assigned_to_checkin`` closes the checkin case and
        the patient cases assigned to it (blanking their assignment
        properties), while unrelated cases stay open and untouched."""
        rule = create_empty_rule(
            self.domain,
            AutomaticUpdateRule.WORKFLOW_CASE_UPDATE,
            case_type="checkin",
        )
        case_properties = {
            "assigned_to_primary_checkin_case_id": self.checkin_case.case_id,
            "is_assigned_primary": "foo",
            "assigned_to_primary_name": "bar",
            "assigned_to_primary_username": "******",
        }
        factory = CaseFactory(self.domain)
        patient_case = factory.create_case(
            case_type="patient",
            owner_id=self.mobile_worker.get_id,
            update=case_properties,
        )
        # Patient assigned to a *different* checkin case - must be left alone.
        other_patient_case = factory.create_case(
            case_type="patient",
            owner_id=self.mobile_worker.get_id,
            update={"assigned_to_primary_checkin_case_id": "123"},
        )
        # Non-patient case pointing at our checkin case - must be left alone.
        other_case = factory.create_case(
            case_type="other",
            owner_id=self.mobile_worker.get_id,
            update={
                "assigned_to_primary_checkin_case_id": self.checkin_case.case_id
            },
        )
        for created in (patient_case, other_patient_case, other_case):
            self._index_for_case_search(created)
        self.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)

        close_cases_assigned_to_checkin(self.checkin_case, rule)

        self.assertTrue(
            self.case_accessor.get_case(self.checkin_case.case_id).closed)
        patient_case = self.case_accessor.get_case(patient_case.case_id)
        self.assertTrue(patient_case.closed)
        # Every assignment property on the closed patient is blanked out.
        for prop in case_properties:
            self.assertEqual(patient_case.get_case_property(prop), "")
        other_case = self.case_accessor.get_case(other_case.case_id)
        self.assertFalse(other_case.closed)
        self.assertEqual(
            other_case.get_case_property(
                "assigned_to_primary_checkin_case_id"),
            self.checkin_case.case_id,
        )
        other_patient_case = self.case_accessor.get_case(
            other_patient_case.case_id)
        self.assertFalse(other_patient_case.closed)
        self.assertEqual(
            other_patient_case.get_case_property(
                "assigned_to_primary_checkin_case_id"),
            "123",
        )
def find_case_ids_by_type(self, domain, case_type):
    """Return all case ids of ``case_type`` in ``domain``, printing the count found."""
    case_ids = CaseAccessors(domain).get_case_ids_in_domain(case_type)
    print(f"Found {len(case_ids)} {case_type} cases in {domain}")
    return case_ids