def import_patients_of_owner(requests, importer, domain_name, owner_id, location=None):
    """
    Import patients from OpenMRS and create or update a CommCare case
    for each of them, all owned by ``owner_id``.

    :param requests: HTTP client wrapper for the OpenMRS server; also
        used to report errors via ``notify_exception()``.
    :param importer: OpenMRS Importer config (case type, external-ID
        column, property mappings).
    :param domain_name: the project space to import into.
    :param owner_id: ID of the user/group that will own new cases.
    :param location: optional location to restrict the patient query.

    API and response-format errors are reported via
    ``requests.notify_exception()`` and the import is abandoned.
    """
    try:
        openmrs_patients = get_openmrs_patients(requests, importer, location)
    except RequestException as err:
        requests.notify_exception(
            f'Unable to import patients for project space "{domain_name}" '
            f'using {importer}: Error calling API: {err}')
        return
    except (KeyError, IndexError, TypeError, ValueError) as err:
        requests.notify_exception(
            f'Unable to import patients for project space "{domain_name}" '
            f'using {importer}: Unexpected response format: {err}')
        return
    case_blocks = []
    for i, patient in enumerate(openmrs_patients):
        case, error = importer_util.lookup_case(
            EXTERNAL_ID,
            str(patient[importer.external_id_column]),
            domain_name,
            importer.case_type)
        if error is None:
            # Existing case: update it from the patient record.
            case_block = get_updatepatient_caseblock(case, patient, importer)
            case_blocks.append(RowAndCase(i, case_block))
        elif error == LookupErrors.NotFound:
            # New patient: create a case owned by ``owner_id``.
            case_block = get_addpatient_caseblock(patient, importer, owner_id)
            case_blocks.append(RowAndCase(i, case_block))
    # BUG FIX: previously ``case_blocks`` was built and then discarded,
    # so the import had no effect. Submit the accumulated blocks, as
    # the other variant of this function does.
    submit_case_blocks(
        [cb.case.as_text() for cb in case_blocks],
        domain_name,
        device_id=f'{OPENMRS_IMPORTER_DEVICE_ID_PREFIX}{importer.get_id}',
        xmlns=XMLNS_OPENMRS,
    )
def update_patient(repeater, patient_uuid):
    """
    Fetch a patient from OpenMRS and submit a case create/update for
    all mapped case properties.

    :param repeater: OpenMRS repeater supplying the domain, location,
        whitelisted case type and property mappings.
    :param patient_uuid: the patient's UUID in OpenMRS; matched against
        the CommCare case's ``external_id``.

    .. NOTE:: OpenMRS UUID must be saved to "external_id" case property
    """
    # Exactly one case type must be whitelisted, otherwise we cannot
    # tell which case type a patient maps to.
    # NOTE(review): `_assert` is defined elsewhere; presumably a soft
    # assert that reports and continues -- confirm, since `owner` below
    # is used even if its `_assert` fails.
    _assert(
        len(repeater.white_listed_case_types) == 1,
        'Unable to update patients from OpenMRS unless a single case type is '
        'specified. domain: "{}". repeater: "{}".'.format(
            repeater.domain, repeater.get_id
        )
    )
    case_type = repeater.white_listed_case_types[0]
    patient = get_patient_by_uuid(repeater.requests, patient_uuid)
    # Look up the case by the patient's OpenMRS UUID, stored in the
    # "external_id" case property (see NOTE in the docstring).
    case, error = importer_util.lookup_case(
        EXTERNAL_ID,
        patient_uuid,
        repeater.domain,
        case_type=case_type,
    )
    if error == LookupErrors.NotFound:
        # No case yet: create one, owned by a CommCare user at the
        # repeater's location.
        owner = get_one_commcare_user_at_location(repeater.domain, repeater.location_id)
        _assert(
            owner,
            'No users found at location "{}" to own patients added from '
            'OpenMRS atom feed. domain: "{}". repeater: "{}".'.format(
                repeater.location_id, repeater.domain, repeater.get_id
            )
        )
        case_block = get_addpatient_caseblock(case_type, owner, patient, repeater)
    else:
        _assert(
            error != LookupErrors.MultipleResults,
            # Multiple cases matched to the same patient.
            # Could be caused by:
            # * The cases were given the same identifier value. It could
            #   be user error, or case config assumed identifier was
            #   unique but it wasn't.
            # * PatientFinder matched badly.
            # * Race condition where a patient was previously added to
            #   both CommCare and OpenMRS.
            'More than one case found matching unique OpenMRS UUID. '
            'domain: "{}". case external_id: "{}". repeater: "{}".'.format(
                repeater.domain, patient_uuid, repeater.get_id
            )
        )
        case_block = get_updatepatient_caseblock(case, patient, repeater)
    # NOTE(review): assumes the caseblock helpers can return a falsy
    # value when there is nothing to submit -- confirm.
    if case_block:
        submit_case_blocks(
            [case_block.as_string()],
            repeater.domain,
            xmlns=XMLNS_OPENMRS,
            device_id=OPENMRS_ATOM_FEED_DEVICE_ID + repeater.get_id,
        )
def update_patient(repeater, patient_uuid):
    """
    Fetch the patient from OpenMRS and submit a case create/update for
    all mapped case properties.

    .. NOTE:: OpenMRS UUID must be saved to "external_id" case property
    """
    # A single whitelisted case type is a precondition: without it we
    # cannot tell which case type the patient maps to.
    if len(repeater.white_listed_case_types) != 1:
        raise ConfigurationError(
            _(f'{repeater.domain}: {repeater}: Error in settings: Unable to update '
              f'patients from OpenMRS unless only one case type is specified.')
        )
    case_type = repeater.white_listed_case_types[0]
    try:
        patient = get_patient_by_uuid(repeater.requests, patient_uuid)
    except (RequestException, ValueError) as err:
        raise OpenmrsException(
            _(f'{repeater.domain}: {repeater}: Error fetching Patient '
              f'{patient_uuid!r}: {err}')) from err
    found_case, lookup_error = importer_util.lookup_case(
        EXTERNAL_ID,
        patient_uuid,
        repeater.domain,
        case_type=case_type,
    )
    if lookup_error == LookupErrors.MultipleResults:
        # Multiple cases have been matched to the same patient.
        # Could be caused by:
        # * The cases were given the same identifier value. It could
        #   be user error, or case config assumed identifier was
        #   unique but it wasn't.
        # * PatientFinder matched badly.
        # * Race condition where a patient was previously added to
        #   both CommCare and OpenMRS.
        raise DuplicateCaseMatch(
            _(f'{repeater.domain}: {repeater}: More than one case found '
              f'matching unique OpenMRS UUID. case external_id: "{patient_uuid}"'
              ))
    if lookup_error == LookupErrors.NotFound:
        if not repeater.openmrs_config.case_config.import_creates_cases:
            # We can't create cases via the Atom feed, just update them.
            # Nothing to do here.
            return
        owner: Optional[CommCareUser] = repeater.first_user
        case_block = get_addpatient_caseblock(case_type, owner, patient, repeater)
    else:
        case_block = get_updatepatient_caseblock(found_case, patient, repeater)
    if not case_block:
        return
    submit_case_blocks(
        [case_block.as_text()],
        repeater.domain,
        xmlns=XMLNS_OPENMRS,
        device_id=OPENMRS_ATOM_FEED_DEVICE_ID + repeater.get_id,
    )
def update_patient(repeater, patient_uuid):
    """
    Fetch a patient from OpenMRS and submit a case create/update for
    all mapped case properties.

    :param repeater: OpenMRS repeater supplying the domain, location,
        whitelisted case type and property mappings.
    :param patient_uuid: the patient's UUID in OpenMRS; matched against
        the CommCare case's ``external_id``.

    .. NOTE:: OpenMRS UUID must be saved to "external_id" case property
    """
    # Exactly one case type must be whitelisted, otherwise we cannot
    # tell which case type a patient maps to.
    # NOTE(review): `_assert` is defined elsewhere; presumably a soft
    # assert that reports and continues -- confirm, since `owner` below
    # is used even if its `_assert` fails.
    _assert(
        len(repeater.white_listed_case_types) == 1,
        'Unable to update patients from OpenMRS unless a single case type is '
        'specified. domain: "{}". repeater: "{}".'.format(
            repeater.domain, repeater.get_id))
    case_type = repeater.white_listed_case_types[0]
    patient = get_patient_by_uuid(repeater.requests, patient_uuid)
    # Look up the case by the patient's OpenMRS UUID, stored in the
    # "external_id" case property (see NOTE in the docstring).
    case, error = importer_util.lookup_case(
        EXTERNAL_ID,
        patient_uuid,
        repeater.domain,
        case_type=case_type,
    )
    if error == LookupErrors.NotFound:
        # No case yet: create one, owned by a CommCare user at the
        # repeater's location.
        owner = get_one_commcare_user_at_location(repeater.domain, repeater.location_id)
        _assert(
            owner,
            'No users found at location "{}" to own patients added from '
            'OpenMRS atom feed. domain: "{}". repeater: "{}".'.format(
                repeater.location_id, repeater.domain, repeater.get_id))
        case_block = get_addpatient_caseblock(case_type, owner, patient, repeater)
    else:
        _assert(
            error != LookupErrors.MultipleResults,
            # Multiple cases matched to the same patient.
            # Could be caused by:
            # * The cases were given the same identifier value. It could
            #   be user error, or case config assumed identifier was
            #   unique but it wasn't.
            # * PatientFinder matched badly.
            # * Race condition where a patient was previously added to
            #   both CommCare and OpenMRS.
            'More than one case found matching unique OpenMRS UUID. '
            'domain: "{}". case external_id: "{}". repeater: "{}".'.format(
                repeater.domain, patient_uuid, repeater.get_id))
        case_block = get_updatepatient_caseblock(case, patient, repeater)
    # NOTE(review): assumes the caseblock helpers can return a falsy
    # value when there is nothing to submit -- confirm.
    if case_block:
        submit_case_blocks(
            [case_block.as_text()],
            repeater.domain,
            xmlns=XMLNS_OPENMRS,
            device_id=OPENMRS_ATOM_FEED_DEVICE_ID + repeater.get_id,
        )
def import_patients_of_owner(requests, importer, domain_name, owner, location=None):
    """
    Import patients from OpenMRS and create or update a CommCare case
    for each of them, all owned by ``owner``.

    :param requests: HTTP client wrapper for the OpenMRS server.
    :param importer: OpenMRS Importer config (case type, external-ID
        column, property mappings).
    :param domain_name: the project space to import into.
    :param owner: the user who will own new cases (``owner.user_id``).
    :param location: optional location to restrict the patient query.
    """
    openmrs_patients = get_openmrs_patients(requests, importer, location)
    case_blocks = []
    for i, patient in enumerate(openmrs_patients):
        case, error = importer_util.lookup_case(
            EXTERNAL_ID,
            str(patient[importer.external_id_column]),
            domain_name,
            importer.case_type
        )
        if error is None:
            # Existing case: update it from the patient record.
            case_block = get_updatepatient_caseblock(case, patient, importer)
            case_blocks.append(RowAndCase(i, case_block))
        elif error == LookupErrors.NotFound:
            # New patient: create a case owned by ``owner``.
            case_block = get_addpatient_caseblock(patient, importer, owner.user_id)
            case_blocks.append(RowAndCase(i, case_block))
    # BUG FIX: previously ``case_blocks`` was built and then discarded,
    # so the import had no effect. Submit the accumulated blocks, as
    # the other variant of this function does.
    submit_case_blocks(
        [cb.case.as_string() for cb in case_blocks],
        domain_name,
        device_id='{}{}'.format(OPENMRS_IMPORTER_DEVICE_ID_PREFIX, importer.get_id),
        user_id=owner.user_id,
        xmlns=XMLNS_OPENMRS,
    )
def import_patients_of_owner(requests, importer, domain_name, owner_id, location=None):
    """
    Import patients from OpenMRS and create or update a CommCare case
    for each of them, all owned by ``owner_id``.

    API and response-format errors are reported via
    ``requests.notify_exception()`` and the import is abandoned;
    configuration problems raise ``ConfigurationError``.
    """
    try:
        patients = get_openmrs_patients(requests, importer, location)
    except RequestException as err:
        requests.notify_exception(
            f'Unable to import patients for project space "{domain_name}" '
            f'using {importer}: Error calling API: {err}')
        return
    except (KeyError, IndexError, TypeError, ValueError) as err:
        requests.notify_exception(
            f'Unable to import patients for project space "{domain_name}" '
            f'using {importer}: Unexpected response format: {err}')
        return

    row_and_cases = []
    for row_num, patient in enumerate(patients):
        try:
            patient_id = str(patient[importer.external_id_column])
        except KeyError:
            raise ConfigurationError(
                f'Error importing patients for project space "{importer.domain}" '
                f'from OpenMRS Importer "{importer}": External ID column '
                f'"{importer.external_id_column}" not found in patient data.')
        matched_case, lookup_error = importer_util.lookup_case(
            EXTERNAL_ID, patient_id, domain_name, importer.case_type)
        if lookup_error is None:
            # Existing case: update it from the patient record.
            block = get_updatepatient_caseblock(matched_case, patient, importer)
            row_and_cases.append(RowAndCase(row_num, block))
        elif lookup_error == LookupErrors.NotFound:
            # New patient: create a case owned by ``owner_id``.
            block = get_addpatient_caseblock(patient, importer, owner_id)
            row_and_cases.append(RowAndCase(row_num, block))
        elif lookup_error == LookupErrors.MultipleResults:
            raise ConfigurationError(
                f'Error importing patients for project space "{importer.domain}" '
                f'from OpenMRS Importer "{importer}": {importer.case_type}'
                f'.{EXTERNAL_ID} "{patient_id}" is not unique.')

    submit_case_blocks(
        [rc.case.as_text() for rc in row_and_cases],
        domain_name,
        device_id=f'{OPENMRS_IMPORTER_DEVICE_ID_PREFIX}{importer.get_id}',
        xmlns=XMLNS_OPENMRS,
    )
def get_case(
    repeater: OpenmrsRepeater,
    patient_uuid: str,
) -> Union[CommCareCase, CommCareCaseSQL, None]:
    """
    Return the case whose ``external_id`` is ``patient_uuid``, or
    ``None`` if no such case exists.

    :raises DuplicateCaseMatch: if more than one case matches the UUID.
    """
    found, lookup_error = importer_util.lookup_case(
        EXTERNAL_ID,
        patient_uuid,
        repeater.domain,
        case_type=repeater.white_listed_case_types[0],
    )
    if lookup_error == LookupErrors.MultipleResults:
        raise DuplicateCaseMatch(
            _(f'{repeater.domain}: {repeater}: More than one case found '
              'matching unique OpenMRS UUID. case external_id: '
              f'"{patient_uuid}". '))
    return found
def import_patients_of_owner(requests, importer, domain_name, owner, location=None):
    """
    Fetch patients from OpenMRS and submit case blocks that create or
    update a CommCare case for each one, all owned by ``owner``.
    """
    rows = []
    for row_num, patient in enumerate(get_openmrs_patients(requests, importer, location)):
        ext_id = str(patient[importer.external_id_column])
        matched_case, lookup_error = importer_util.lookup_case(
            EXTERNAL_ID, ext_id, domain_name, importer.case_type)
        if lookup_error is None:
            # Existing case: update it from the patient record.
            block = get_updatepatient_caseblock(matched_case, patient, importer)
            rows.append(RowAndCase(row_num, block))
        elif lookup_error == LookupErrors.NotFound:
            # New patient: create a case owned by ``owner``.
            block = get_addpatient_caseblock(patient, importer, owner.user_id)
            rows.append(RowAndCase(row_num, block))
    submit_case_blocks(
        [row.case.as_string() for row in rows],
        domain_name,
        device_id='{}{}'.format(OPENMRS_IMPORTER_DEVICE_ID_PREFIX, importer.get_id),
        user_id=owner.user_id,
        xmlns=XMLNS_OPENMRS,
    )
def do_import(spreadsheet, config, domain, task=None, chunksize=CASEBLOCK_CHUNKSIZE,
              record_form_callback=None):
    """
    Import cases from a spreadsheet: each data row either creates a new
    case or updates the case matched by ``config.search_field``.

    Case blocks are accumulated and submitted in chunks via the local
    ``_submit_caseblocks`` closure.

    NOTE(review): this block ends right after generating a new case id
    (``id = uuid.uuid4().hex``) without using it, and several values
    initialised or popped above (``match_count``, ``too_many_matches``,
    ``to_close``, ``owner_id``, ``extras``) are never consumed in the
    visible code -- the function appears truncated; confirm against the
    repository history before relying on it.
    """
    match_count = created_count = too_many_matches = num_chunks = 0
    errors = importer_util.ImportErrorDetail()
    # Submissions are attributed to the user who configured the import.
    user = CouchUser.get_by_user_id(config.couch_user_id, domain)
    username = user.username
    user_id = user._id
    # keep a cache of id lookup successes to help performance
    id_cache = {}
    name_cache = {}
    caseblocks = []
    ids_seen = set()

    def _submit_caseblocks(domain, case_type, caseblocks):
        # Submit one chunk of case blocks; returns True if the chunk
        # failed so the caller can adjust its counters.
        err = False
        if caseblocks:
            try:
                form, cases = submit_case_blocks(
                    [cb.case.as_string() for cb in caseblocks],
                    domain,
                    username,
                    user_id,
                    device_id=__name__ + ".do_import",
                )
                if form.is_error:
                    errors.add(error=ImportErrors.ImportErrorMessage,
                               row_number=form.problem)
            except Exception:
                # Any failure marks every row in this chunk as errored.
                err = True
                for row_number, case in caseblocks:
                    errors.add(error=ImportErrors.ImportErrorMessage,
                               row_number=row_number)
            else:
                if record_form_callback:
                    record_form_callback(form.form_id)
                # Record the dynamic properties seen so exports can
                # offer them as columns.
                properties = set().union(
                    *[set(c.dynamic_case_properties().keys()) for c in cases])
                if case_type and len(properties):
                    add_inferred_export_properties.delay(
                        'CaseImporter',
                        domain,
                        case_type,
                        properties,
                    )
                else:
                    _soft_assert = soft_assert(notify_admins=True)
                    _soft_assert(
                        len(properties) == 0,
                        'error adding inferred export properties in domain '
                        '({}): {}'.format(domain, ", ".join(properties)))
        return err

    row_count = spreadsheet.max_row
    for i, row in enumerate(spreadsheet.iter_row_dicts()):
        if task:
            set_task_progress(task, i, row_count)
        # skip first row (header row)
        if i == 0:
            continue
        search_id = importer_util.parse_search_id(config, row)
        fields_to_update = importer_util.populate_updated_fields(config, row)
        if not any(fields_to_update.values()):
            # if the row was blank, just skip it, no errors
            continue
        if config.search_field == 'external_id' and not search_id:
            # do not allow blank external id since we save this
            errors.add(ImportErrors.BlankExternalId, i + 1)
            continue
        # Reserved columns are popped out of the update dict so they
        # are not written as ordinary case properties.
        external_id = fields_to_update.pop('external_id', None)
        parent_id = fields_to_update.pop('parent_id', None)
        parent_external_id = fields_to_update.pop('parent_external_id', None)
        parent_type = fields_to_update.pop('parent_type', config.case_type)
        parent_ref = fields_to_update.pop('parent_ref', 'parent')
        to_close = fields_to_update.pop('close', False)
        # If this row references an id queued but not yet submitted,
        # flush the queue first so the lookup below can find it.
        if any([
            lookup_id and lookup_id in ids_seen
            for lookup_id in [search_id, parent_id, parent_external_id]
        ]):
            # clear out the queue to make sure we've processed any potential
            # cases we want to look up
            # note: these three lines are repeated a few places, and could be converted
            # to a function that makes use of closures (and globals) to do the same thing,
            # but that seems sketchier than just beeing a little RY
            _submit_caseblocks(domain, config.case_type, caseblocks)
            num_chunks += 1
            caseblocks = []
            ids_seen = set()  # also clear ids_seen, since all the cases will now be in the database
        case, error = importer_util.lookup_case(config.search_field, search_id, domain, config.case_type)
        if case:
            # A case of a different type than configured is skipped.
            if case.type != config.case_type:
                continue
        elif error == LookupErrors.NotFound:
            if not config.create_new_cases:
                continue
        # NOTE(review): no branch handles LookupErrors.MultipleResults
        # here even though ``too_many_matches`` is initialised above --
        # confirm whether that handling was lost.
        owner_id = user_id
        extras = {}
        if parent_id:
            try:
                parent_case = CaseAccessors(domain).get_case(parent_id)
                if parent_case.domain == domain:
                    extras['index'] = {
                        parent_ref: (parent_case.type, parent_id)
                    }
            except ResourceNotFound:
                errors.add(ImportErrors.InvalidParentId, i + 1, 'parent_id')
                continue
        elif parent_external_id:
            parent_case, error = importer_util.lookup_case(
                'external_id', parent_external_id, domain, parent_type)
            if parent_case:
                extras['index'] = {
                    parent_ref: (parent_type, parent_case.case_id)
                }
        case_name = fields_to_update.pop('name', None)
        if BULK_UPLOAD_DATE_OPENED.enabled(domain):
            date_opened = fields_to_update.pop(CASE_TAG_DATE_OPENED, None)
            if date_opened:
                extras['date_opened'] = date_opened
        if not case:
            # Generate an id for the case about to be created.
            id = uuid.uuid4().hex
def import_encounter(repeater, encounter_uuid):
    """
    Fetch one Bahmni encounter from OpenMRS and submit a case update
    for the observations that are mapped to case properties.

    :param repeater: OpenMRS repeater supplying the HTTP client, the
        domain, the whitelisted case type and observation mappings.
    :param encounter_uuid: UUID of the encounter to import.
    """
    # It's possible that an OpenMRS concept appears more than once in
    # form_configs. Use a defaultdict(list) so that earlier definitions
    # don't get overwritten by later ones:
    def fields_from_observations(observations, mappings):
        """
        Traverse a tree of observations, and return the ones mapped to
        case properties.
        """
        fields = {}
        for obs in observations:
            if obs['concept']['uuid'] in mappings:
                for mapping in mappings[obs['concept']['uuid']]:
                    fields[mapping.case_property] = mapping.value.deserialize(
                        obs['value'])
            # Observation groups nest their members; recurse into them.
            # Values from nested observations overwrite earlier ones
            # for the same case property.
            if obs['groupMembers']:
                fields.update(
                    fields_from_observations(obs['groupMembers'], mappings))
        return fields

    response = repeater.requests.get(
        '/ws/rest/v1/bahmnicore/bahmniencounter/' + encounter_uuid,
        {'includeAll': 'true'},
        raise_for_status=True)
    encounter = response.json()
    case_property_updates = fields_from_observations(
        encounter['observations'], repeater.observation_mappings)
    # Only submit if at least one observation mapped to a property.
    if case_property_updates:
        case_blocks = []
        patient_uuid = encounter['patientUuid']
        case_type = repeater.white_listed_case_types[0]
        case, error = importer_util.lookup_case(
            EXTERNAL_ID,
            patient_uuid,
            repeater.domain,
            case_type=case_type,
        )
        if case:
            case_id = case.get_id
        elif error == LookupErrors.NotFound:
            # The encounter is for a patient that has not yet been imported
            patient = get_patient_by_uuid(repeater.requests, patient_uuid)
            owner = get_one_commcare_user_at_location(repeater.domain, repeater.location_id)
            case_block = get_addpatient_caseblock(case_type, owner, patient, repeater)
            case_blocks.append(case_block)
            case_id = case_block.case_id
        else:
            # Ambiguous lookup: report it and bail out without
            # submitting anything.
            # NOTE(review): `_assert` is presumably a soft assert that
            # reports and continues -- confirm.
            _assert(
                error != LookupErrors.MultipleResults,
                'More than one case found matching unique OpenMRS UUID. '
                'domain: "{}". case external_id: "{}". repeater: "{}".'.format(
                    repeater.domain, patient_uuid, repeater.get_id))
            return
        case_blocks.append(
            CaseBlock(
                case_id=case_id,
                create=False,
                update=case_property_updates,
            ))
        submit_case_blocks(
            [cb.as_text() for cb in case_blocks],
            repeater.domain,
            xmlns=XMLNS_OPENMRS,
            device_id=OPENMRS_ATOM_FEED_DEVICE_ID + repeater.get_id,
        )
def do_import(spreadsheet, config, domain, task=None, chunksize=CASEBLOCK_CHUNKSIZE,
              record_form_callback=None):
    """
    Import cases from a spreadsheet: each data row either creates a new
    case or updates the case matched by ``config.search_field``.

    Case blocks are accumulated and submitted in chunks of roughly
    ``chunksize``.

    :param spreadsheet: source of rows (``iter_row_dicts``/``max_row``).
    :param config: import config (case type, search field, owner, etc.).
    :param domain: the project space to import into.
    :param task: optional async task handle for progress reporting.
    :param chunksize: number of case blocks to submit per chunk.
    :param record_form_callback: optional callback invoked with each
        successfully submitted form id.
    :return: dict with ``created_count``, ``match_count``,
        ``too_many_matches``, ``errors`` and ``num_chunks``.
    """
    match_count = created_count = too_many_matches = num_chunks = 0
    errors = importer_util.ImportErrorDetail()

    # Submissions are attributed to the user who configured the import.
    user = CouchUser.get_by_user_id(config.couch_user_id, domain)
    username = user.username
    user_id = user._id

    # keep a cache of id lookup successes to help performance
    id_cache = {}
    name_cache = {}
    caseblocks = []
    ids_seen = set()
    track_load = case_load_counter("case_importer", domain)

    def _submit_caseblocks(domain, case_type, caseblocks):
        # Submit one chunk of case blocks; returns True if the chunk
        # failed so the caller can adjust its counters.
        err = False
        if caseblocks:
            try:
                form, cases = submit_case_blocks(
                    [cb.case.as_string().decode('utf-8') for cb in caseblocks],
                    domain,
                    username,
                    user_id,
                    device_id=__name__ + ".do_import",
                )
                if form.is_error:
                    errors.add(
                        error=ImportErrors.ImportErrorMessage,
                        row_number=form.problem
                    )
            except Exception:
                # Any failure marks every row in this chunk as errored.
                err = True
                for row_number, case in caseblocks:
                    errors.add(
                        error=ImportErrors.ImportErrorMessage,
                        row_number=row_number
                    )
            else:
                if record_form_callback:
                    record_form_callback(form.form_id)
                # Record the dynamic properties seen so exports can
                # offer them as columns.
                properties = set().union(*[set(c.dynamic_case_properties().keys()) for c in cases])
                if case_type and len(properties):
                    add_inferred_export_properties.delay(
                        'CaseImporter',
                        domain,
                        case_type,
                        properties,
                    )
                else:
                    _soft_assert = soft_assert(notify_admins=True)
                    _soft_assert(
                        len(properties) == 0,
                        'error adding inferred export properties in domain '
                        '({}): {}'.format(domain, ", ".join(properties))
                    )
        return err

    row_count = spreadsheet.max_row
    for i, row in enumerate(spreadsheet.iter_row_dicts()):
        if task:
            set_task_progress(task, i, row_count)
        # skip first row (header row)
        if i == 0:
            continue
        search_id = importer_util.parse_search_id(config, row)
        fields_to_update = importer_util.populate_updated_fields(config, row)
        if not any(fields_to_update.values()):
            # if the row was blank, just skip it, no errors
            continue
        if config.search_field == 'external_id' and not search_id:
            # do not allow blank external id since we save this
            errors.add(ImportErrors.BlankExternalId, i + 1)
            continue
        # Reserved columns are popped out of the update dict so they
        # are not written as ordinary case properties.
        external_id = fields_to_update.pop('external_id', None)
        parent_id = fields_to_update.pop('parent_id', None)
        parent_external_id = fields_to_update.pop('parent_external_id', None)
        parent_type = fields_to_update.pop('parent_type', config.case_type)
        parent_ref = fields_to_update.pop('parent_ref', 'parent')
        to_close = fields_to_update.pop('close', False)
        # If this row references an id queued but not yet submitted,
        # flush the queue first so the lookup below can find it.
        if any([lookup_id and lookup_id in ids_seen
                for lookup_id in [search_id, parent_id, parent_external_id]]):
            # clear out the queue to make sure we've processed any potential
            # cases we want to look up
            # note: these three lines are repeated a few places, and could be converted
            # to a function that makes use of closures (and globals) to do the same thing,
            # but that seems sketchier than just beeing a little RY
            _submit_caseblocks(domain, config.case_type, caseblocks)
            num_chunks += 1
            caseblocks = []
            ids_seen = set()  # also clear ids_seen, since all the cases will now be in the database
        case, error = importer_util.lookup_case(
            config.search_field,
            search_id,
            domain,
            config.case_type
        )
        track_load()
        if case:
            # A case of a different type than configured is skipped.
            if case.type != config.case_type:
                continue
        elif error == LookupErrors.NotFound:
            if not config.create_new_cases:
                continue
        elif error == LookupErrors.MultipleResults:
            too_many_matches += 1
            continue
        uploaded_owner_name = fields_to_update.pop('owner_name', None)
        uploaded_owner_id = fields_to_update.pop('owner_id', None)
        if uploaded_owner_name:
            # If an owner name was provided, replace the provided
            # uploaded_owner_id with the id of the provided group or owner
            try:
                uploaded_owner_id = importer_util.get_id_from_name(uploaded_owner_name, domain, name_cache)
            except SQLLocation.MultipleObjectsReturned:
                errors.add(ImportErrors.DuplicateLocationName, i + 1)
                continue
            if not uploaded_owner_id:
                errors.add(ImportErrors.InvalidOwnerName, i + 1, 'owner_name')
                continue
        if uploaded_owner_id:
            # If an owner_id mapping exists, verify it is a valid user
            # or case sharing group
            if importer_util.is_valid_id(uploaded_owner_id, domain, id_cache):
                owner_id = uploaded_owner_id
                id_cache[uploaded_owner_id] = True
            else:
                errors.add(ImportErrors.InvalidOwnerId, i + 1, 'owner_id')
                id_cache[uploaded_owner_id] = False
                continue
        else:
            # if they didn't supply an owner_id mapping, default to current
            # user
            owner_id = user_id
        extras = {}
        if parent_id:
            try:
                parent_case = CaseAccessors(domain).get_case(parent_id)
                track_load()
                if parent_case.domain == domain:
                    extras['index'] = {
                        parent_ref: (parent_case.type, parent_id)
                    }
            except ResourceNotFound:
                errors.add(ImportErrors.InvalidParentId, i + 1, 'parent_id')
                continue
        elif parent_external_id:
            parent_case, error = importer_util.lookup_case(
                'external_id',
                parent_external_id,
                domain,
                parent_type
            )
            track_load()
            if parent_case:
                extras['index'] = {
                    parent_ref: (parent_type, parent_case.case_id)
                }
        case_name = fields_to_update.pop('name', None)
        if BULK_UPLOAD_DATE_OPENED.enabled(domain):
            date_opened = fields_to_update.pop(CASE_TAG_DATE_OPENED, None)
            if date_opened:
                extras['date_opened'] = date_opened
        if not case:
            # Create a new case with a freshly generated id.
            id = uuid.uuid4().hex
            if config.search_field == 'external_id':
                extras['external_id'] = search_id
            elif external_id:
                extras['external_id'] = external_id
            try:
                caseblock = CaseBlock(
                    create=True,
                    case_id=id,
                    owner_id=owner_id,
                    user_id=user_id,
                    case_type=config.case_type,
                    case_name=case_name or '',
                    update=fields_to_update,
                    **extras
                )
                caseblocks.append(RowAndCase(i, caseblock))
                created_count += 1
                if external_id:
                    ids_seen.add(external_id)
            except CaseBlockError:
                errors.add(ImportErrors.CaseGeneration, i + 1)
        else:
            # Update the existing matched case.
            if external_id:
                extras['external_id'] = external_id
            if uploaded_owner_id:
                extras['owner_id'] = owner_id
            if to_close == 'yes':
                extras['close'] = True
            if case_name is not None:
                extras['case_name'] = case_name
            try:
                caseblock = CaseBlock(
                    create=False,
                    case_id=case.case_id,
                    update=fields_to_update,
                    **extras
                )
                caseblocks.append(RowAndCase(i, caseblock))
                match_count += 1
            except CaseBlockError:
                errors.add(ImportErrors.CaseGeneration, i + 1)
        # check if we've reached a reasonable chunksize
        # and if so submit
        if len(caseblocks) >= chunksize:
            _submit_caseblocks(domain, config.case_type, caseblocks)
            num_chunks += 1
            caseblocks = []
    # final purge of anything left in the queue
    # NOTE(review): on a failed final submission only one match is
    # subtracted even if the chunk held many rows -- confirm intended.
    if _submit_caseblocks(domain, config.case_type, caseblocks):
        match_count -= 1
    num_chunks += 1
    return {
        'created_count': created_count,
        'match_count': match_count,
        'too_many_matches': too_many_matches,
        'errors': errors.as_dict(),
        'num_chunks': num_chunks,
    }
def import_encounter(repeater, encounter_uuid):
    """
    Fetch one Bahmni encounter from OpenMRS and submit a case update
    for the observations that are mapped to case properties.
    """
    # It's possible that an OpenMRS concept appears more than once in
    # form_configs. Use a defaultdict(list) so that earlier definitions
    # don't get overwritten by later ones:
    def walk_observations(obs_list, mappings):
        """
        Traverse a tree of observations, and return the ones mapped to
        case properties.
        """
        collected = {}
        for observation in obs_list:
            concept_uuid = observation['concept']['uuid']
            for mapping in mappings.get(concept_uuid, []):
                collected[mapping.case_property] = mapping.value.deserialize(observation['value'])
            if observation['groupMembers']:
                collected.update(walk_observations(observation['groupMembers'], mappings))
        return collected

    response = repeater.requests.get(
        '/ws/rest/v1/bahmnicore/bahmniencounter/' + encounter_uuid,
        {'includeAll': 'true'},
        raise_for_status=True
    )
    encounter_doc = response.json()
    updates = walk_observations(encounter_doc['observations'], repeater.observation_mappings)
    if not updates:
        # Nothing in this encounter maps to a case property.
        return

    pending_blocks = []
    patient_uuid = encounter_doc['patientUuid']
    case_type = repeater.white_listed_case_types[0]
    matched_case, lookup_error = importer_util.lookup_case(
        EXTERNAL_ID,
        patient_uuid,
        repeater.domain,
        case_type=case_type,
    )
    if matched_case:
        case_id = matched_case.get_id
    elif lookup_error == LookupErrors.NotFound:
        # The encounter is for a patient that has not yet been imported
        patient = get_patient_by_uuid(repeater.requests, patient_uuid)
        owner = get_one_commcare_user_at_location(repeater.domain, repeater.location_id)
        new_patient_block = get_addpatient_caseblock(case_type, owner, patient, repeater)
        pending_blocks.append(new_patient_block)
        case_id = new_patient_block.case_id
    else:
        # Ambiguous lookup: report it and bail out without submitting.
        _assert(
            lookup_error != LookupErrors.MultipleResults,
            'More than one case found matching unique OpenMRS UUID. '
            'domain: "{}". case external_id: "{}". repeater: "{}".'.format(
                repeater.domain,
                patient_uuid,
                repeater.get_id
            )
        )
        return
    pending_blocks.append(CaseBlock(
        case_id=case_id,
        create=False,
        update=updates,
    ))
    submit_case_blocks(
        [cb.as_string() for cb in pending_blocks],
        repeater.domain,
        xmlns=XMLNS_OPENMRS,
        device_id=OPENMRS_ATOM_FEED_DEVICE_ID + repeater.get_id,
    )