def test_parent_lookups(self):
    # return all the cases whose parent (relationship named 'father') has case property 'name' = 'Mace'
    parsed = parse_xpath("father/name = 'Mace'")
    expected_filter = {
        "nested": {
            "path": "indices",
            "query": {
                "filtered": {
                    "query": {
                        "match_all": {},
                    },
                    "filter": {
                        "and": (
                            {
                                "terms": {
                                    "indices.referenced_id": [self.parent_case_id],
                                }
                            },
                            {
                                "term": {
                                    "indices.identifier": "father"
                                }
                            },
                        )
                    }
                }
            }
        }
    }
    built_filter = build_filter_from_ast(self.domain, parsed)
    self.assertEqual(expected_filter, built_filter)
    self.assertEqual(
        [self.child_case_id],
        CaseSearchES().filter(built_filter).values_list('_id', flat=True),
    )
def test_nested_parent_lookups(self):
    parsed = parse_xpath("father/mother/house = 'Tyrell'")
    expected_filter = {
        "nested": {
            "path": "indices",
            "query": {
                "filtered": {
                    "query": {
                        "match_all": {},
                    },
                    "filter": {
                        "and": (
                            {
                                "terms": {
                                    "indices.referenced_id": [self.parent_case_id],
                                }
                            },
                            {
                                "term": {
                                    "indices.identifier": "father"
                                }
                            },
                        )
                    }
                }
            }
        }
    }
    built_filter = build_filter_from_ast(self.domain, parsed)
    self.assertEqual(expected_filter, built_filter)
    self.assertEqual(
        [self.child_case_id],
        CaseSearchES().filter(built_filter).values_list('_id', flat=True),
    )
def handle(self, domain, migration_id, **options):
    query = (CaseSearchES()
             .domain(domain)
             .case_property_query("created_by_migration", migration_id, "must", fuzzy=False))
    hits = query.run().hits
    case_ids = [hit['_id'] for hit in hits]
    for case_id in case_ids:
        print(case_id)
    if options['commit']:
        print("Deleting cases")
        for ids in chunked(case_ids, 100):
            CaseAccessors(domain).soft_delete_cases(list(ids))
        print("Deletion finished")
def get_most_recent_case_type(domain):
    # gets the case type of the most recently modified case in a domain
    query = (CaseSearchES()
             .domain(domain)
             .NOT(case_es.case_type(USER_LOCATION_OWNER_MAP_TYPE)))
    query = query.sort('modified_on', desc=True)
    result = query.size(1).values_list('type', flat=True)
    return result[0] if len(result) > 0 else None
def test_get_related_case_results(self):
    # Note that cases must be defined before other cases can reference them
    cases = [
        {'_id': 'c1', 'case_type': 'monster', 'description': 'grandparent of first person'},
        {'_id': 'c2', 'case_type': 'monster', 'description': 'parent of first person',
         'index': {'parent': ('monster', 'c1')}},
        {'_id': 'c3', 'case_type': 'monster', 'description': 'parent of host'},
        {'_id': 'c4', 'case_type': 'monster', 'description': 'host of second person',
         'index': {'parent': ('monster', 'c3')}},
        {'_id': 'c5', 'description': 'first person',
         'index': {'parent': ('monster', 'c2')}},
        {'_id': 'c6', 'description': 'second person',
         'index': {'host': ('monster', 'c4')}},
    ]
    self._bootstrap_cases_in_es_for_domain(self.domain, cases)
    hits = CaseSearchES().domain(self.domain).case_type(self.case_type).run().hits
    cases = [wrap_case_search_hit(result) for result in hits]
    self.assertEqual({case.case_id for case in cases}, {'c5', 'c6'})

    self._assert_related_case_ids(cases, set(), set())
    self._assert_related_case_ids(cases, {"parent"}, {"c2"})
    self._assert_related_case_ids(cases, {"host"}, {"c4"})
    self._assert_related_case_ids(cases, {"parent/parent"}, {"c1"})
    self._assert_related_case_ids(cases, {"host/parent"}, {"c3"})
    self._assert_related_case_ids(cases, {"host", "parent"}, {"c2", "c4"})
    self._assert_related_case_ids(cases, {"host", "parent/parent"}, {"c4", "c1"})
def initial_value(cls, request, domain):
    initial_value = super(CaseTypeReportFilter, cls).initial_value(request, domain)
    if initial_value is None:
        query = (CaseSearchES().domain(domain)
                 .NOT(case_es.case_type(USER_LOCATION_OWNER_MAP_TYPE)))
        result = query.size(1).values_list('type', flat=True)
        initial_value = result[0] if len(result) > 0 else None
    return initial_value
def handle(self, domain, log_path, **options):
    commit = options['commit']
    factory = CaseFactory(domain)
    logger.info("Starting {} migration on {} at {}".format(
        "real" if commit else "fake", domain, datetime.datetime.utcnow()))
    cases = CaseSearchES().domain(domain).case_type("episode").scroll()
    with open(log_path, "w") as f:
        for case in cases:
            case_props = {prop['key']: prop['value'] for prop in case['case_properties']}
            treatment_status = None
            treatment_initiated = case_props.get('treatment_initiated')
            diagnosing_facility_id = case_props.get('diagnosing_facility_id')
            treatment_initiating_facility_id = case_props.get('treatment_initiating_facility_id')
            if (treatment_initiated == 'yes_phi'
                    and diagnosing_facility_id and treatment_initiating_facility_id
                    and diagnosing_facility_id != treatment_initiating_facility_id):
                treatment_status = 'initiated_outside_facility'
            elif (treatment_initiated == 'yes_phi'
                    and diagnosing_facility_id and treatment_initiating_facility_id):
                treatment_status = 'initiated_first_line_treatment'
            elif treatment_initiated == 'yes_private':
                treatment_status = 'initiated_outside_rntcp'

            if treatment_status:
                case_id = case['_id']
                f.write(case_id + "\n")
                logger.info(case_id)
                case_structure = CaseStructure(
                    case_id=case_id,
                    walk_related=False,
                    attrs={
                        "create": False,
                        "update": {
                            "treatment_status": treatment_status,
                            "updated_by_migration": "enikshay_2b_treatment_status_fix",
                        },
                    },
                )
                if commit:
                    try:
                        factory.create_or_update_case(case_structure)
                    except NikshayLocationNotFound:
                        pass
    logger.info("Migration finished at {}".format(datetime.datetime.utcnow()))
def _get_case_search_es_modified_dates(case_ids):
    results = (CaseSearchES(for_export=True)
               .case_ids(case_ids)
               .values_list('_id', 'server_modified_on', 'domain'))
    return {
        _id: (iso_string_to_datetime(server_modified_on) if server_modified_on else None, domain)
        for _id, server_modified_on, domain in results
    }
def test_get_related_case_results_duplicates(self):
    """Test that `get_related_cases` does not include any cases that are
    in the initial set or are duplicates of others already found."""
    # d1 :> c2 > c1 > a1
    # d1 > c1
    # Search for case type 'c'
    # - initial results c1, c2
    # - related lookups (parent, parent/parent) yield a1, c1, a1
    # - child lookups yield c2, d1
    # - (future) extension lookups yield d1
    cases = [
        {'_id': 'a1', 'case_type': 'a'},
        {'_id': 'c1', 'case_type': 'c', 'index': {
            'parent': ('a', 'a1'),
        }},
        {'_id': 'c2', 'case_type': 'c', 'index': {
            'parent': ('c', 'c1'),
        }},
        {'_id': 'd1', 'case_type': 'd', 'index': {
            'parent': ('c', 'c1'),
            'host': ('c', 'c2'),
        }},
    ]
    self._bootstrap_cases_in_es_for_domain(self.domain, cases)
    hits = CaseSearchES().domain(self.domain).case_type("c").run().hits
    cases = [wrap_case_search_hit(result) for result in hits]
    self.assertEqual({case.case_id for case in cases}, {'c1', 'c2'})

    with patch("corehq.apps.case_search.utils.get_related_case_relationships",
               return_value={"parent", "parent/parent"}), \
            patch("corehq.apps.case_search.utils.get_child_case_types",
                  return_value={"c", "d"}), \
            patch("corehq.apps.case_search.utils.get_app_cached"):
        cases = get_related_cases(_QueryHelper(self.domain), None, {"c"}, cases, None)

    case_ids = Counter([case.case_id for case in cases])
    self.assertEqual(set(case_ids), {"a1", "d1"})  # c1, c2 excluded since they are in the initial list
    self.assertEqual(max(case_ids.values()), 1, case_ids)  # no duplicates
def delete_case_search_cases(domain):
    if domain is None or isinstance(domain, dict):
        raise TypeError("Domain attribute is required")

    query = {'query': CaseSearchES().domain(domain).raw_query['query']}
    get_es_new().delete_by_query(
        index=CASE_SEARCH_INDEX,
        doc_type=CASE_ES_TYPE,
        body=query,
    )
def test_geopoint_property(self):
    CaseSearchConfig.objects.get_or_create(pk=self.domain, enabled=True)
    domains_needing_search_index.clear()
    self._make_data_dictionary(gps_properties=['coords', 'short_coords', 'other_coords'])
    case = self._make_case(case_properties={
        'coords': '-33.8561 151.2152 0 0',
        'short_coords': '-33.8561 151.2152',
        'other_coords': '42 Wallaby Way',
        'not_coords': '-33.8561 151.2152 0 0',
    })
    CaseSearchReindexerFactory(domain=self.domain).build().reindex()
    self.elasticsearch.indices.refresh(CASE_SEARCH_INDEX)

    es_case = CaseSearchES().doc_id(case.case_id).run().hits[0]
    self.assertEqual(
        self._get_prop(es_case['case_properties'], 'coords'),
        {
            'key': 'coords',
            'value': '-33.8561 151.2152 0 0',
            'geopoint_value': {'lat': -33.8561, 'lon': 151.2152},
        },
    )
    self.assertEqual(
        self._get_prop(es_case['case_properties'], 'short_coords'),
        {
            'key': 'short_coords',
            'value': '-33.8561 151.2152',
            'geopoint_value': {'lat': -33.8561, 'lon': 151.2152},
        },
    )
    self.assertEqual(
        self._get_prop(es_case['case_properties'], 'other_coords'),
        # The value here isn't a valid geopoint
        {'key': 'other_coords', 'value': '42 Wallaby Way', 'geopoint_value': None},
    )
    self.assertEqual(
        self._get_prop(es_case['case_properties'], 'not_coords'),
        # This isn't a geopoint property in the data dictionary
        {'key': 'not_coords', 'value': '-33.8561 151.2152 0 0'},
    )
def test_case_search_reindexer(self):
    es = get_es_new()
    FormProcessorTestUtils.delete_all_cases()
    case = _create_and_save_a_case()
    ensure_index_deleted(CASE_SEARCH_INDEX)

    # With case search not enabled, case should not make it to ES
    CaseSearchConfig.objects.all().delete()
    call_command('ptop_reindexer_v2', 'case-search')
    es.indices.refresh(CASE_SEARCH_INDEX)  # as well as refresh the index
    self._assert_es_empty(esquery=CaseSearchES())

    # With case search enabled, it should get indexed
    CaseSearchConfig.objects.create(domain=self.domain, enabled=True)
    self.addCleanup(CaseSearchConfig.objects.all().delete)
    call_command('ptop_reindexer_v2', 'case-search')
    es.indices.refresh(CASE_SEARCH_INDEX)  # as well as refresh the index
    self._assert_case_is_in_es(case, esquery=CaseSearchES())
def test_case_search_reindexer(self):
    es = get_es_new()
    FormProcessorTestUtils.delete_all_cases()
    case = _create_and_save_a_case()
    ensure_index_deleted(CASE_SEARCH_INDEX)

    # With case search not enabled, case should not make it to ES
    CaseSearchConfig.objects.all().delete()
    domains_needing_search_index.clear()
    reindex_and_clean('case-search')
    es.indices.refresh(CASE_SEARCH_INDEX)  # as well as refresh the index
    self._assert_es_empty(esquery=CaseSearchES())

    # With case search enabled, it should get indexed
    with mock.patch('corehq.pillows.case_search.domains_needing_search_index',
                    mock.MagicMock(return_value=[self.domain])):
        reindex_and_clean('case-search')
        es.indices.refresh(CASE_SEARCH_INDEX)  # as well as refresh the index
        self._assert_case_is_in_es(case, esquery=CaseSearchES())
def delete_case_search_cases(domain):
    if domain is None or isinstance(domain, dict):
        raise TypeError("Domain attribute is required")

    get_es_new().indices.refresh(CASE_SEARCH_INDEX)
    case_ids = CaseSearchES().domain(domain).values_list('_id', flat=True)

    ElasticsearchInterface(get_es_new()).bulk_ops([{
        "_op_type": "delete",
        "_index": CASE_SEARCH_INDEX,
        "_type": CASE_ES_TYPE,
        "_id": case_id,
    } for case_id in case_ids])
def test_delete(self):
    case_query = CaseSearchES().domain(self.domain).doc_type("CommCareCase")

    # Import some cases
    all_case_ids = []
    with self.drtb_import(IMPORT_ROWS, "mumbai", commit=True) as (_, result_rows):
        for row in result_rows:
            case_ids = row.get("case_ids", "")
            self.assertTrue(
                case_ids,
                "No case ids, got this error instead: {}".format(row.get("exception")))
            all_case_ids.extend([x for x in case_ids.split(",") if x])

    # Create a case unrelated to this import
    case = CaseFactory(self.domain).create_case(
        case_type="person", update={"created_by_migration": "bar"})
    all_case_ids.append(case._id)

    # Send cases to ES
    self._refersh_es(all_case_ids)

    # Confirm that cases are in ES
    # 30 cases per person = 25 resistance + 1 drtb + 1 sdps + 1 person + 1 episode + 1 occurrence
    self.assertEqual(case_query.count(), (2 * 30) + 1)

    # Run the deletion script
    call_command('delete_imported_drtb_cases', self.domain, "foo", "--commit")

    # Confirm that the cases have been deleted
    self._refersh_es(all_case_ids)
    self.es_client.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
    self.assertEqual(case_query.count(), 1)
def handle(self, domain, log_path, **options):
    commit = options['commit']
    factory = CaseFactory(domain)
    logger.info("Starting {} migration on {} at {}".format(
        "real" if commit else "fake", domain, datetime.datetime.utcnow()))
    cases = (CaseSearchES()
             .domain(domain)
             .case_type("episode")
             .case_property_query("case_version", "20", "must")
             .scroll())
    with open(log_path, "w") as f:
        for case in cases:
            case_props = {prop['key']: prop['value'] for prop in case['case_properties']}
            referred_by_id = case_props.get('referred_by_id')
            updated_by_migration = case_props.get('updated_by_migration')
            if ((updated_by_migration == 'enikshay_2b_case_properties'
                    or updated_by_migration == 'enikshay_2b_treatment_status_fix')
                    and referred_by_id):
                case_id = case['_id']
                f.write(case_id + "\n")
                logger.info(case_id)
                case_structure = CaseStructure(
                    case_id=case_id,
                    walk_related=False,
                    attrs={
                        "create": False,
                        "update": {
                            "referred_outside_enikshay_by_id": referred_by_id,
                            "updated_by_migration": "enikshay_2b_referred_by_id_fix",
                        },
                    },
                )
                if commit:
                    factory.create_or_update_case(case_structure)
    logger.info("Migration finished at {}".format(datetime.datetime.utcnow()))
def _search_results(self, paged=True, location_ids=None):
    cs = CaseSearchES().domain(self.domain).case_type(CASE_TYPE_VOUCHER)
    if location_ids:
        cs = cs.case_property_query('voucher_fulfilled_by_location_id', " ".join(location_ids))
    if self.voucher_state:
        cs = cs.case_property_query('state', self.voucher_state)
    if self.voucher_id:
        cs = cs.case_property_query('voucher_id', self.voucher_id)
    if paged:
        cs = cs.start(self.pagination.start).size(self.pagination.count)
    return cs.run()
def _assert_case_in_es(self, domain, case):
    # confirm change made it to elasticsearch
    self.elasticsearch.indices.refresh(CASE_SEARCH_INDEX)
    results = CaseSearchES().run()
    self.assertEqual(1, results.total)

    case_doc = results.hits[0]
    self.assertEqual(domain, case_doc['domain'])
    self.assertEqual(case.case_id, case_doc['_id'])
    self.assertEqual(case.name, case_doc['name'])

    # Confirm change contains case_properties
    self.assertItemsEqual(list(case_doc['case_properties'][0]), ['key', 'value'])
    for case_property in case_doc['case_properties']:
        key = case_property['key']
        self.assertEqual(case.get_case_property(key), case_property['value'])
def print_totals(self, domains):
    # enough tabs to clear the longest domain name (assuming 8-character tab stops), plus padding
    max_space = '\t' * (int(max([len(x) for x in domains]) / 8) + 2)
    header = 'Domain{}CaseES\t\tCaseSearchES\n'.format(max_space)
    divider = '{}\n'.format('*' * (len(header) + len(max_space) * 8))
    self.stdout.write(divider)
    self.stdout.write(header)
    self.stdout.write(divider)
    for domain in domains:
        # drop the tabs already "consumed" by this domain name so the columns line up
        spacer = max_space[int(len(domain) / 8):]
        total_case_es = CaseES().domain(domain).count()
        total_case_search = CaseSearchES().domain(domain).count()
        self.stdout.write('{domain}{spacer}{case_es}\t\t{case_search}\n'.format(
            domain=domain,
            spacer=spacer,
            case_es=total_case_es,
            case_search=total_case_search,
        ))
def delete_exploded_cases(domain, explosion_id, task=None):
    if not explosion_id:
        raise Exception("explosion_id is falsy, aborting rather than deleting all cases")
    if task:
        DownloadBase.set_progress(delete_exploded_case_task, 0, 0)
    query = (CaseSearchES()
             .domain(domain)
             .case_property_query("cc_explosion_id", explosion_id))
    case_ids = query.values_list('_id', flat=True)
    if task:
        DownloadBase.set_progress(delete_exploded_case_task, 0, len(case_ids))

    ledger_accessor = LedgerAccessorSQL
    deleted_form_ids = set()
    num_deleted_ledger_entries = 0

    for id in case_ids:
        ledger_form_ids = {
            tx.form_id for tx in ledger_accessor.get_ledger_transactions_for_case(id)
        }
        for form_id in ledger_form_ids:
            ledger_accessor.delete_ledger_transactions_for_form([id], form_id)
        num_deleted_ledger_entries += ledger_accessor.delete_ledger_values(id)

        new_form_ids = set(CommCareCase.objects.get_case_xform_ids(id)) - deleted_form_ids
        XFormInstance.objects.soft_delete_forms(domain, list(new_form_ids))
        deleted_form_ids |= new_form_ids

    completed = 0
    for ids in chunked(case_ids, 100):
        CommCareCase.objects.soft_delete_cases(domain, list(ids))
        if task:
            completed += len(ids)
            DownloadBase.set_progress(delete_exploded_case_task, completed, len(case_ids))

    return {
        'messages': [
            "Successfully deleted {} cases".format(len(case_ids)),
            "Successfully deleted {} forms".format(len(deleted_form_ids)),
            "Successfully deleted {} ledgers".format(num_deleted_ledger_entries),
        ]
    }
def test_case_pillow_error_in_case_es(self):
    self.assertEqual(0, PillowError.objects.filter(pillow='case-pillow').count())
    with patch('corehq.pillows.case_search.domain_needs_search_index', return_value=True), \
            patch('corehq.pillows.case.transform_case_for_elasticsearch') as case_transform, \
            patch('corehq.pillows.case_search.transform_case_for_elasticsearch') as case_search_transform:
        case_transform.side_effect = Exception('case_transform error')
        case_search_transform.side_effect = Exception('case_search_transform error')
        case_id, case_name = self._create_case_and_sync_to_es()

    # confirm change did not make it to case search index
    results = CaseSearchES().run()
    self.assertEqual(0, results.total)

    # confirm change did not make it to case index
    results = CaseES().run()
    self.assertEqual(0, results.total)

    self.assertEqual(1, PillowError.objects.filter(pillow='case-pillow').count())
def delete_exploded_cases(domain, explosion_id, task=None):
    if task:
        DownloadBase.set_progress(delete_exploded_case_task, 0, 0)
    query = (CaseSearchES()
             .domain(domain)
             .case_property_query("cc_explosion_id", explosion_id))
    case_ids = query.values_list('_id', flat=True)
    if task:
        DownloadBase.set_progress(delete_exploded_case_task, 0, len(case_ids))

    case_accessor = CaseAccessors(domain)
    form_accessor = FormAccessors(domain)
    ledger_accessor = LedgerAccessorSQL
    deleted_form_ids = set()
    num_deleted_ledger_entries = 0

    for id in case_ids:
        ledger_form_ids = {
            tx.form_id for tx in ledger_accessor.get_ledger_transactions_for_case(id)
        }
        for form_id in ledger_form_ids:
            ledger_accessor.delete_ledger_transactions_for_form([id], form_id)
        num_deleted_ledger_entries += ledger_accessor.delete_ledger_values(id)

        new_form_ids = set(case_accessor.get_case_xform_ids(id)) - deleted_form_ids
        form_accessor.soft_delete_forms(list(new_form_ids))
        deleted_form_ids |= new_form_ids

    completed = 0
    for ids in chunked(case_ids, 100):
        case_accessor.soft_delete_cases(list(ids))
        if task:
            completed += len(ids)
            DownloadBase.set_progress(delete_exploded_case_task, completed, len(case_ids))

    return {
        'messages': [
            "Successfully deleted {} cases".format(len(case_ids)),
            "Successfully deleted {} forms".format(len(deleted_form_ids)),
            "Successfully deleted {} ledgers".format(num_deleted_ledger_entries),
        ]
    }
def handle(self, domain, **options):
    repeater_id = 'b4e19fd859f852871703e8e32a1764a9'  # 99dots update
    repeater_type = 'NinetyNineDotsUpdatePatientRepeater'  # 99dots update
    cs = (
        CaseSearchES().domain(domain)
        .regexp_case_property_query('secondary_phone', '[0-9]+')
        .case_type('person')
        .case_property_query('enrolled_in_private', 'true')
    )
    person_case_ids = cs.values_list('_id', flat=True)
    enabled_ids = set()
    for person_id in person_case_ids:
        try:
            episode_case = get_open_episode_case_from_person('enikshay', person_id)
            if episode_case.get_case_property('dots_99_enabled') == 'true':
                enabled_ids.add(person_id)
        except ENikshayCaseNotFound:
            pass

    with open('99dots_phone.csv', 'w') as f:
        writer = csv.writer(f)
        writer.writerow(['payload_id', 'state', 'payload', 'attempt message'])
        for payload_id in enabled_ids:
            repeat_record = RepeatRecord(
                repeater_id=repeater_id,
                repeater_type=repeater_type,
                domain=domain,
                next_check=datetime.utcnow(),
                payload_id=payload_id,
            )
            if options['commit']:
                repeat_record.fire()
            writer.writerow([
                repeat_record.payload_id,
                repeat_record.state,
                repeat_record.get_payload(),
                repeat_record.attempts[-1].message,
            ])
def delete_case_search_cases(domain):
    if domain is None or isinstance(domain, dict):
        raise TypeError("Domain attribute is required")

    get_es_new().indices.refresh(CASE_SEARCH_INDEX)
    case_ids = CaseSearchES().domain(domain).values_list('_id', flat=True)

    op_kwargs = {
        "_op_type": "delete",
        "_index": CASE_SEARCH_INDEX_INFO.alias,
        "_type": CASE_ES_TYPE,
    }
    if settings.ELASTICSEARCH_MAJOR_VERSION == 7:
        op_kwargs.pop('_type')

    ElasticsearchInterface(get_es_new()).bulk_ops([{
        **op_kwargs,
        "_id": case_id,
    } for case_id in case_ids])
def test_nested_sort(self):
    """https://www.elastic.co/guide/en/elasticsearch/reference/1.7/search-request-sort.html#_sorting_within_nested_objects
    """
    path = "case_properties"
    field_name = 'value'
    key = "dob"
    sort_filter = {"term": {"case_properties.key.exact": key}}
    expected = [{
        "case_properties.value": {
            "order": "asc",
            "nested_filter": sort_filter,
        }
    }]
    self.assertEqual(
        expected,
        CaseSearchES().nested_sort(path, field_name, sort_filter).raw_query['sort'])
def handle(self, domain, log_path, **options):
    commit = options['commit']
    factory = CaseFactory(domain)
    logger.info("Starting {} migration on {} at {}".format(
        "real" if commit else "fake", domain, datetime.datetime.utcnow()))
    cases = (CaseSearchES()
             .domain(domain)
             .case_type("test")
             .case_property_query("updated_by_migration", "enikshay_2b_case_properties", "must")
             .run().hits)
    with open(log_path, "w") as f:
        for case in cases:
            case_props = {prop['key']: prop['value'] for prop in case['case_properties']}
            if (case_props.get('purpose_of_testing') == "follow_up_dstb"
                    and case_props.get("rft_general", "") in ("", None)):
                case_id = case['_id']
                f.write(case_id + "\n")
                logger.info(case_id)
                case_structure = CaseStructure(
                    case_id=case_id,
                    walk_related=False,
                    attrs={
                        "create": False,
                        "update": {
                            "rft_general": "follow_up_dstb",
                            "updated_by_migration": "enikshay_2b_reason_for_test_fix",
                        },
                    },
                )
                if commit:
                    factory.create_or_update_case(case_structure)
    logger.info("Migration finished at {}".format(datetime.datetime.utcnow()))
def _assert_case_in_es(self, domain, case):
    # confirm change made it to elasticsearch
    self.elasticsearch.indices.refresh(CASE_SEARCH_INDEX)
    results = CaseSearchES().run()
    self.assertEqual(1, results.total)

    case_doc = results.hits[0]
    self.assertEqual(domain, case_doc['domain'])
    self.assertEqual(case.case_id, case_doc['_id'])
    self.assertEqual(case.name, case_doc['name'])

    # Confirm change contains case_properties
    self.assertItemsEqual(list(case_doc['case_properties'][0]), ['key', 'value'])
    for case_property in case_doc['case_properties']:
        key = case_property['key']
        try:
            self.assertEqual(
                SPECIAL_CASE_PROPERTIES_MAP[key].value_getter(case.to_json()),
                case_property['value'],
            )
        except KeyError:
            self.assertEqual(case.get_case_property(key), case_property['value'])
def test_regular_sort(self):
    field_name = "foo"
    expected = [{field_name: {'order': 'asc'}}]
    self.assertEqual(expected, CaseSearchES().sort(field_name).raw_query['sort'])
def _assert_index_empty(self):
    self.elasticsearch.indices.refresh(CASE_SEARCH_INDEX)
    results = CaseSearchES().run()
    self.assertEqual(0, results.total)
def handle(self, commit, domain, log_path, **options):
    factory = CaseFactory(domain)
    headers = [
        'case_id',
        'testing_facility_id',
        'datamigration_testing_facility_id',
    ]
    print("Starting {} migration on {} at {}".format(
        "real" if commit else "fake", domain, datetime.datetime.utcnow()))
    case_ids = [
        hit['_id'] for hit in (CaseSearchES()
                               .domain(domain)
                               .case_type("test")
                               .case_property_query("result_recorded", "yes", "must")
                               .run().hits)
    ]
    failed_tests = []
    with open(log_path, "w") as log_file:
        writer = csv.writer(log_file)
        writer.writerow(headers)
        for test in CaseAccessors(domain=domain).iter_cases(case_ids):
            try:
                if (test.get_case_property('datamigration_testing_facility_id') != 'yes'
                        and not test.get_case_property('testing_facility_id')):
                    form_data = self._get_result_recorded_form(test)
                    microscopy_id = self._get_path([
                        'update_test_result', 'microscopy',
                        'ql_testing_facility_details', 'default_dmc_id'
                    ], form_data)
                    cbnaat_id = self._get_path([
                        'update_test_result', 'cbnaat',
                        'ql_testing_facility_details', 'default_cdst_id'
                    ], form_data)
                    testing_facility_id = microscopy_id or cbnaat_id
                    if testing_facility_id:
                        writer.writerow([test.case_id, testing_facility_id, "yes"])
                        print('Updating {}...'.format(test.case_id))
                        case_structure = CaseStructure(
                            case_id=test.case_id,
                            walk_related=False,
                            attrs={
                                "create": False,
                                "update": {
                                    "datamigration_testing_facility_id": "yes",
                                    "testing_facility_id": testing_facility_id,
                                },
                            },
                        )
                        if commit:
                            factory.create_or_update_case(case_structure)
                    else:
                        writer.writerow([test.case_id, testing_facility_id, "no"])
            except Exception:
                failed_tests.append(test.case_id)
    print("Migration finished at {}".format(datetime.datetime.utcnow()))
    print("Failed tests: {}".format(failed_tests))