def test_simple_template_mapping(self):
    """A template attribute is rendered from other internal attributes and maps back out."""
    attr_map = {
        "attributes": {
            "last_name": {"p1": ["sn"], "p2": ["sn"]},
            "first_name": {"p1": ["givenName"], "p2": ["givenName"]},
            "name": {"p2": ["cn"]},
        },
        "template_attributes": {
            "name": {"p2": ["${first_name[0]} ${last_name[0]}"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal("p2", {"givenName": ["Valfrid"], "sn": ["Lindeman"]})
    assert "name" in internal
    assert len(internal["name"]) == 1
    assert internal["name"][0] == "Valfrid Lindeman"
    external = mapper.from_internal("p2", internal)
    assert external["cn"][0] == "Valfrid Lindeman"
def test_template_attribute_with_multiple_templates_tries_them_all_templates(self):
    """Every configured template is attempted; those that render contribute values in order."""
    attr_map = {
        "attributes": {
            "last_name": {"p1": ["sn"]},
            "first_name": {"p1": ["givenName"]},
            "name": {"p1": ["cn"]},
        },
        "template_attributes": {
            "name": {
                "p1": [
                    "${first_name[0]} ${last_name[0]}",
                    "${unknown[0]} ${unknown[1]}",
                    "${first_name[1]} ${last_name[1]}",
                    "${foo} ${bar}",
                ]
            }
        },
    }
    mapper = AttributeMapper(attr_map)
    source = {
        "sn": ["Surname1", "Surname2"],
        "givenName": ["Given1", "Given2"],
        "cn": ["Common Name"],
    }
    internal = mapper.to_internal("p1", source)
    assert len(internal["name"]) == 2
    assert internal["name"][0] == "Given1 Surname1"
    assert internal["name"][1] == "Given2 Surname2"
def test_template_attribute_preserves_existing_attribute_if_template_cant_be_rendered(self):
    """If no template can be rendered, the directly mapped value is kept untouched."""
    attr_map = {
        "attributes": {
            "last_name": {"p1": ["sn"]},
            "first_name": {"p1": ["givenName"]},
            "name": {"p1": ["cn"]},
        },
        "template_attributes": {
            "name": {"p1": ["${unknown[0]} ${last_name[0]}"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    source = {"sn": ["Surname"], "givenName": ["Given"], "cn": ["Common Name"]}
    internal = mapper.to_internal("p1", source)
    assert len(internal["name"]) == 1
    assert internal["name"][0] == "Common Name"
def __init__(self, config: Mapping[str, Any], internal_attributes: Dict[str, Any], *args, **kwargs):
    """Set up the eduID/SCIM database connections and the attribute converter.

    :param config: raw plugin configuration, validated into a ``Config`` instance
    :param internal_attributes: SATOSA internal-attribute mapping fed to AttributeMapper
    """
    super().__init__(*args, **kwargs)
    self.config = Config(**config)
    # Setup databases
    self.eduid_userdb = UserDB(db_uri=self.config.mongo_uri, db_name='eduid_scimapi')
    logger.info(f'Connected to eduid db: {self.eduid_userdb}')
    # TODO: Implement real 'data owner' to database lookup
    data_owner = 'eduid.se'
    _owner = data_owner.replace('.', '_')  # replace dots with underscores
    coll = f'{_owner}__users'
    # TODO: rename old collection and remove this
    if data_owner == 'eduid.se':
        coll = 'profiles'
    self._userdbs = {
        'eduid.se': ScimApiUserDB(db_uri=self.config.mongo_uri, collection=coll)
    }
    self.converter = AttributeMapper(internal_attributes)
    # Get the internal attribute name for the eduPersonPrincipalName that will be
    # used to find users in the SCIM database
    _int = self.converter.to_internal(
        'saml', {'eduPersonPrincipalName': 'something'})
    # The mapping is assumed to yield exactly one internal name here —
    # TODO(review): confirm against the internal_attributes config in use.
    self.ext_id_attr = list(_int.keys())[0]
    logger.debug(
        f'SCIM externalId internal attribute name: {self.ext_id_attr}')
def test_template_attribute_overrides_existing_attribute(self):
    """A rendered template value replaces the directly mapped value of the same attribute."""
    attr_map = {
        "attributes": {
            "last_name": {"p1": ["sn"]},
            "first_name": {"p1": ["givenName"]},
            "name": {"p1": ["cn"]},
        },
        "template_attributes": {
            "name": {"p1": ["${first_name[0]} ${last_name[0]}"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    source = {"sn": ["Surname"], "givenName": ["Given"], "cn": ["Common Name"]}
    internal = mapper.to_internal("p1", source)
    external = mapper.from_internal("p1", internal)
    assert len(internal["name"]) == 1
    assert internal["name"][0] == "Given Surname"
    assert external["cn"][0] == "Given Surname"
def test_scoped_template_mapping(self):
    """Templates may pipe substituted values through filters (here the 'scope' filter)."""
    attr_map = {
        "attributes": {
            "unscoped_affiliation": {"p1": ["eduPersonAffiliation"]},
            "uid": {"p1": ["eduPersonPrincipalName"]},
            "affiliation": {"p1": ["eduPersonScopedAffiliation"]},
        },
        "template_attributes": {
            "affiliation": {"p1": ["${unscoped_affiliation[0]}@${uid[0] | scope}"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal(
        "p1",
        {"eduPersonAffiliation": ["student"], "eduPersonPrincipalName": ["*****@*****.**"]},
    )
    assert "affiliation" in internal
    assert len(internal["affiliation"]) == 1
    assert internal["affiliation"][0] == "*****@*****.**"
def test_from_internal_with_unknown_profile(self):
    """Converting to a profile with no mappings yields an empty external representation."""
    mapper = AttributeMapper({"attributes": {"mail": {"foo": ["email"]}}})
    external = mapper.from_internal("bar", {"mail": "bob"})
    assert external == {}
def test_to_internal_filter_with_unknown_profile(self):
    """Filtering for an unknown profile yields no internal attribute names."""
    mapping = {
        "attributes": {
            "mail": {
                "foo": ["email"],
            }
        }
    }
    converter = AttributeMapper(mapping)
    # Renamed from 'filter' so the builtin of the same name is not shadowed.
    internal_filter = converter.to_internal_filter("bar", ["email"])
    assert internal_filter == []
def test_to_internal_with_unknown_attribute_profile(self):
    """Converting from a profile with no mappings yields an empty internal representation."""
    mapper = AttributeMapper({"attributes": {"mail": {"foo": ["email"]}}})
    internal = mapper.to_internal("bar", {"email": ["*****@*****.**"]})
    assert internal == {}
def test_to_internal_with_missing_attribute_value(self):
    """No internal attribute is created when the mapped source attribute is absent."""
    attr_map = {
        "attributes": {
            "mail": {"p1": ["emailaddress"]},
        }
    }
    mapper = AttributeMapper(attr_map)
    assert not mapper.to_internal("p1", {})
def test_to_internal_same_attribute_value_from_list_and_single_value(self, attribute_value):
    """List-valued and single-valued source data produce the same internal value."""
    attr_map = {
        "attributes": {
            "mail": {"foo": ["email"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal("foo", attribute_value)
    assert internal["mail"] == ["*****@*****.**"]
def test_map_one_source_attribute_to_multiple_internal_attributes(self):
    """A single external attribute can feed several internal attributes."""
    attr_map = {
        "attributes": {
            "mail": {"p1": ["email"]},
            "identifier": {"p1": ["email"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal("p1", {"email": ["*****@*****.**"]})
    assert internal == {"mail": ["*****@*****.**"], "identifier": ["*****@*****.**"]}
def test_to_internal_filter(self):
    """Every external attribute with a mapping contributes its internal name to the filter."""
    mapping = {
        "attributes": {
            "mail": {
                "p1": ["email"],
            },
            "identifier": {
                "p1": ["uid"],
            },
        },
    }
    converter = AttributeMapper(mapping)
    # Renamed from 'filter' so the builtin of the same name is not shadowed.
    internal_filter = converter.to_internal_filter("p1", ["uid", "email"])
    assert Counter(internal_filter) == Counter(["mail", "identifier"])
def test_to_internal_filter_profile_missing_attribute_mapping(self):
    """Only attributes mapped in the requested profile appear in the filter."""
    attr_map = {
        "attributes": {
            "mail": {"foo": ["email"]},
            "id": {"foo": ["id"], "bar": ["uid"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    result = mapper.to_internal_filter("bar", ["email", "uid"])
    # 'mail' must not be included since it is missing in the 'bar' profile.
    assert result == ["id"]
def insert_user_in_user_db(self, frontend, user_id):
    """Store testuser1's attributes in the frontend's user db, keyed by user_id."""
    internal_attrs = AttributeMapper(frontend.internal_attributes).to_internal(
        "saml", USERS["testuser1"]
    )
    frontend.user_db[user_id] = frontend.converter.from_internal(
        "openid", internal_attrs
    )
def test_respect_sp_entity_categories(self, context, entity_category, entity_category_module,
                                      expected_attributes, idp_conf, sp_conf, internal_response):
    """Only the attributes released for the SP's entity categories end up in the response."""
    idp_metadata_str = create_metadata_from_config_dict(idp_conf)
    idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = [entity_category_module]
    if all(entity_category):  # don't insert empty entity category
        sp_conf["entity_category"] = entity_category
    if entity_category == [COCO]:
        sp_conf["service"]["sp"]["required_attributes"] = expected_attributes

    # The union of everything any known entity category may release.
    release_lists = [
        swamid.RELEASE[""],
        edugain.RELEASE[COCO],
        refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP],
        swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)],
        swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)],
        swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)],
        swamid.RELEASE[SFS_1993_1153],
    ]
    all_released_attributes = list(itertools.chain.from_iterable(release_lists))

    internal_attributes = dict(
        attributes={attr.lower(): {"saml": [attr]} for attr in all_released_attributes}
    )
    samlfrontend = self.setup_for_authn_req(context, idp_conf, sp_conf,
                                            internal_attributes=internal_attributes)
    user_attributes = {k: "foo" for k in all_released_attributes}
    internal_response.attributes = AttributeMapper(internal_attributes).to_internal(
        "saml", user_attributes)
    internal_response.requester = sp_conf["entityid"]
    resp = self.get_auth_response(samlfrontend, context, internal_response, sp_conf,
                                  idp_metadata_str)

    assert Counter(resp.ava.keys()) == Counter(expected_attributes)
def internal_response(self, idp_conf):
    """Fixture: an InternalData carrying testuser1's attributes from a password login."""
    auth_info = AuthenticationInformation(PASSWORD, "2015-09-30T12:21:37Z", idp_conf["entityid"])
    response = InternalData(auth_info=auth_info)
    response.attributes = AttributeMapper(INTERNAL_ATTRIBUTES).to_internal(
        "saml", USERS["testuser1"])
    return response
def test_to_internal_profile_missing_attribute_mapping(self):
    """Attributes without a mapping in the requested profile are dropped."""
    attr_map = {
        "attributes": {
            "mail": {"foo": ["email"]},
            "id": {"foo": ["id"], "bar": ["uid"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal("bar", {"email": ["*****@*****.**"], "uid": ["uid"]})
    # no mapping for the 'mail' attribute in the 'bar' profile
    assert "mail" not in internal
    assert internal["id"] == ["uid"]
def test_mapping_to_nested_attribute(self):
    """A flat SAML attribute maps out to a dotted (nested) OpenID attribute path."""
    attr_map = {
        "attributes": {
            "address": {"openid": ["address.formatted"], "saml": ["postaladdress"]},
        },
    }
    source = {"postaladdress": ["100 Universal City Plaza, Hollywood CA 91608, USA"]}
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal("saml", source)
    external = mapper.from_internal("openid", internal)
    assert external["address"]["formatted"] == source["postaladdress"]
def setup_for_authn_response(self, context, frontend, auth_req):
    """Prime state and build an InternalData as if testuser1 just authenticated."""
    context.state[frontend.name] = {"oidc_request": auth_req.to_urlencoded()}
    auth_info = AuthenticationInformation(PASSWORD, "2015-09-30T12:21:37Z", "unittest_idp.xml")
    response = InternalData(auth_info=auth_info)
    response.attributes = AttributeMapper(INTERNAL_ATTRIBUTES).to_internal(
        "saml", USERS["testuser1"])
    response.subject_id = USERS["testuser1"]["eduPersonTargetedID"][0]
    return response
def test_multiple_source_attribute_values(self):
    """All configured source attributes contribute values to the internal attribute."""
    attr_map = {
        "attributes": {
            "mail": {"saml": ["mail", "emailAddress", "email"]},
        },
    }
    source = {
        "mail": ["*****@*****.**"],
        "email": ["*****@*****.**"],
        "emailAddress": ["*****@*****.**"],
    }
    expected = Counter(["*****@*****.**", "*****@*****.**", "*****@*****.**"])
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal("saml", source)
    assert Counter(internal["mail"]) == expected
    external = mapper.from_internal("saml", internal)
    # On the way out, all values land under the first configured source attribute.
    assert Counter(external[attr_map["attributes"]["mail"]["saml"][0]]) == expected
def test_template_attribute_fail_does_not_insert_None_attribute_value(self):
    """An unrenderable template must not leave a None value in the internal data."""
    attr_map = {
        "attributes": {
            "last_name": {"p1": ["sn"]},
            "first_name": {"p1": ["givenName"]},
            "name": {"p1": ["cn"]},
        },
        "template_attributes": {
            "name": {"p1": ["${first_name[0]} ${last_name[0]}"]},
        },
    }
    mapper = AttributeMapper(attr_map)
    internal = mapper.to_internal("p1", {})
    assert len(internal) == 0
def test_nested_attribute_to_internal(self):
    """A nested (dotted-path) external attribute is flattened into the internal value."""
    attr_map = {
        "attributes": {
            "address": {"openid": ["address.formatted"]},
        },
    }
    source = {
        "address": {"formatted": ["100 Universal City Plaza, Hollywood CA 91608, USA"]}
    }
    internal = AttributeMapper(attr_map).to_internal("openid", source)
    assert internal["address"] == source["address"]["formatted"]
def test_custom_attribute_release_with_less_attributes_than_entity_category(self, context, idp_conf, sp_conf, internal_response):
    """A per-SP exclude list can remove attributes an entity category would release."""
    idp_metadata_str = create_metadata_from_config_dict(idp_conf)
    idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = ["swamid"]
    sp_conf["entity_category"] = [SFS_1993_1153]

    expected_attributes = swamid.RELEASE[SFS_1993_1153]
    internal_attributes = dict(
        attributes={attr.lower(): {"saml": [attr]} for attr in expected_attributes}
    )
    user_attributes = {k: "foo" for k in expected_attributes}
    internal_response.attributes = AttributeMapper(internal_attributes).to_internal(
        "saml", user_attributes)

    custom_attributes = {idp_conf["entityid"]: {sp_conf["entityid"]: {"exclude": ["norEduPersonNIN"]}}}
    samlfrontend = self.setup_for_authn_req(
        context, idp_conf, sp_conf,
        internal_attributes=internal_attributes,
        extra_config=dict(custom_attribute_release=custom_attributes))
    resp = self.get_auth_response(samlfrontend, context, internal_response, sp_conf,
                                  idp_metadata_str)
    assert len(resp.ava.keys()) == 0
def __init__(self, config, internal_attributes, *args, **kwargs):
    """Keep the raw microservice configuration and build the attribute converter."""
    super().__init__(*args, **kwargs)
    self.converter = AttributeMapper(internal_attributes)
    self.config = config
class DBAttributeStore(ResponseMicroService):
    """
    Use identifier provided by the backend authentication service to lookup
    a person record in DB and obtain attributes to assert about the user to
    the frontend receiving service.
    """
    PEOPLE_TABLE = "zone_people"
    PERSON_SERVICES_TABLE = "zone_person_zone_service"
    SERVICES_TABLE = "zone_services"
    logprefix = "DB_ATTRIBUTE_STORE:"
    attribute_profile = 'saml'

    def __init__(self, config, internal_attributes, *args, **kwargs):
        """Keep the raw configuration and build the attribute converter."""
        super().__init__(*args, **kwargs)
        self.config = config
        self.converter = AttributeMapper(internal_attributes)

    def process(self, context, data):
        """Look the user up in the DB and merge stored attributes into *data*.

        Falls through to the next microservice on any configuration, blacklist
        or DB error, leaving *data* unchanged.
        """
        logprefix = DBAttributeStore.logprefix

        # Initialize the configuration to use as the default configuration
        # that is passed during initialization.
        config = self.config
        configClean = copy.deepcopy(config)
        if 'db_password' in configClean:
            configClean['db_password'] = '******'  # never log credentials
        satosa_logging(logger, logging.DEBUG,
                       "{} Using default configuration {}".format(logprefix, configClean),
                       context.state)

        # Find the entityID for the SP that initiated the flow and target IdP
        try:
            spEntityID = context.state.state_dict['SATOSA_BASE']['requester']
            idpEntityID = data.auth_info.issuer
        except KeyError:
            satosa_logging(logger, logging.ERROR,
                           "{} Unable to determine the entityID's for the IdP or SP".format(logprefix),
                           context.state)
            return super().process(context, data)

        satosa_logging(logger, logging.DEBUG,
                       "{} entityID for the requester is {}".format(logprefix, spEntityID),
                       context.state)
        # BUGFIX: this informational message was previously logged at ERROR level.
        satosa_logging(logger, logging.DEBUG,
                       "{} entityID for the source IdP is {}".format(logprefix, idpEntityID),
                       context.state)

        # Examine our configuration to determine if there is a per-SP configuration
        if spEntityID in self.config:
            config = self.config[spEntityID]
            configClean = copy.deepcopy(config)
            if 'db_password' in configClean:
                configClean['db_password'] = '******'
            satosa_logging(logger, logging.DEBUG,
                           "{} For SP {} using configuration {}".format(
                               logprefix, spEntityID, configClean),
                           context.state)

        def pick(key):
            # Per-SP value wins; fall back to the default configuration.
            # Raises KeyError when missing in both (caught below).
            return config[key] if key in config else self.config[key]

        # Obtain configuration details from the per-SP configuration or the
        # default configuration.
        try:
            db_host = pick('db_host')
            db_user = pick('db_user')
            db_schema = pick('db_schema')
            db_password = pick('db_password')
            idp_identifiers = pick('idp_identifiers')
            user_id = pick('user_id')
            if 'clear_input_attributes' in config:
                clear_input_attributes = config['clear_input_attributes']
            elif 'clear_input_attributes' in self.config:
                clear_input_attributes = self.config['clear_input_attributes']
            else:
                clear_input_attributes = False
            if 'blacklist' in config:
                blacklist = config['blacklist']
            else:
                blacklist = self.config.get('blacklist') or []
        except KeyError as err:
            satosa_logging(logger, logging.ERROR,
                           "{} Configuration '{}' is missing".format(logprefix, err),
                           context.state)
            return super().process(context, data)

        if spEntityID in blacklist:
            satosa_logging(logger, logging.DEBUG,
                           "{} Skipping lookup for {}".format(logprefix, spEntityID),
                           context.state)
            return super().process(context, data)

        connection = None
        try:
            connection = MySQLdb.connect(host=db_host, user=db_user,
                                         passwd=db_password, db=db_schema)
            cursor = connection.cursor()
            satosa_logging(logger, logging.DEBUG,
                           "{} Connected to DB server".format(logprefix),
                           context.state)
            satosa_logging(logger, logging.DEBUG,
                           "{} Using IdP asserted attributes {}".format(
                               logprefix, idp_identifiers),
                           context.state)

            # Collect the candidate DB identifiers: the SATOSA user_id (when
            # configured) plus all values of the configured IdP attributes.
            values = []
            if user_id:
                values += [data.user_id]
            for identifier in idp_identifiers:
                if identifier in data.attributes:
                    satosa_logging(logger, logging.DEBUG,
                                   "{} IdP asserted {} values for attribute {}: {}".format(
                                       logprefix, len(data.attributes[identifier]),
                                       identifier, data.attributes[identifier]),
                                   context.state)
                    values += data.attributes[identifier]
                else:
                    satosa_logging(logger, logging.DEBUG,
                                   "{} IdP did not assert attribute {}".format(
                                       logprefix, identifier),
                                   context.state)

            satosa_logging(logger, logging.DEBUG,
                           "{} IdP asserted values for DB id: {}".format(logprefix, values),
                           context.state)

            return_values = {}
            if (len(values) > 0):
                # Prepare select statement; all user-supplied values are bound
                # as parameters below (only trusted table names are formatted in).
                query = "SELECT p.`attributes` FROM `{}` p "
                query += "JOIN `{}` ps ON p.`id`=ps.`zone_person_id` "
                query += "JOIN `{}` z ON ps.`zone_service_id`=z.`id` "
                query += "WHERE p.`uid` in (" + ",".join(
                    ['%s'] * len(values)) + ") "
                query += "AND z.`metadata`=%s"
                query = query.format(self.PEOPLE_TABLE,
                                     self.PERSON_SERVICES_TABLE,
                                     self.SERVICES_TABLE)
                satosa_logging(logger, logging.DEBUG,
                               "{} query: {}".format(logprefix, query),
                               context.state)

                # Execute prepared statement
                cursor.execute(query, values + [spEntityID])
                rows = cursor.fetchall()
                for row in rows:
                    attributes = json.loads(row[0])
                    for k, v in attributes.items():
                        if isinstance(v, str):
                            v = [v]
                        return_values.setdefault(k, []).extend(v)
                if len(rows) > 1:
                    satosa_logging(logger, logging.DEBUG,
                                   "{} More than one CO found ({})".format(
                                       logprefix, len(rows)),
                                   context.state)
            satosa_logging(logger, logging.DEBUG,
                           "{} return_values: {}".format(logprefix, return_values),
                           context.state)
        except Exception as err:
            # BUGFIX: the original format string mixed automatic ('{}') and
            # manual ('{0}') field numbering, which raises ValueError and
            # masked the real error; it also passed None instead of the state.
            satosa_logging(logger, logging.ERROR,
                           "{} Caught exception: {}".format(logprefix, err),
                           context.state)
            return super().process(context, data)
        finally:
            # BUGFIX: close the connection on the error path too — it used to
            # be closed only in the 'else' branch and leaked on exceptions.
            if connection is not None:
                satosa_logging(logger, logging.DEBUG,
                               "{} Closing connection to DB server".format(logprefix),
                               context.state)
                connection.close()

        # Before using a found record, if any, to populate attributes
        # clear any attributes incoming to this microservice if so configured.
        if clear_input_attributes:
            satosa_logging(logger, logging.DEBUG,
                           "{} Clearing values from input attributes".format(logprefix),
                           context.state)

        internal = self.converter.to_internal(self.attribute_profile, return_values)
        for k, v in internal.items():
            if isinstance(v, str):
                v = [v]
            if clear_input_attributes:
                # NOTE(review): only keys present in the DB result are
                # overwritten here; other incoming attributes survive even with
                # clear_input_attributes set — confirm that this is the intent.
                data.attributes[k] = v
            else:
                data.attributes.setdefault(k, []).extend(v)

        satosa_logging(logger, logging.DEBUG,
                       "{} returning data.attributes {}".format(logprefix, str(data.attributes)),
                       context.state)
        return super().process(context, data)
class SBSAttributeStore(ResponseMicroService):
    """Fetch user attributes from the SBS API and add them to the response data."""

    log_prefix = "SBS_ATTRIBUTE_STORE:"
    attribute_profile = "saml"

    def __init__(self, config, internal_attributes, *args, **kwargs):
        """Keep the raw configuration and build the attribute converter."""
        super().__init__(*args, **kwargs)
        self.config = config
        self.converter = AttributeMapper(internal_attributes)

    @staticmethod
    def _debug(msg, context):
        satosa_logging(logger, logging.DEBUG, msg, context.state)

    def process(self, context, data):
        """Query SBS for the user's attributes and merge them into *data*."""
        config_clean = copy.deepcopy(self.config)
        config_clean.pop("sbs_api_password", None)  # never log the API password
        self._debug(f"{self.log_prefix} Using default configuration {config_clean}", context)

        # Find the entityID for the SP that initiated the flow and target IdP
        try:
            sp_entity_id = context.state.state_dict["SATOSA_BASE"]["requester"]
            idp_entity_id = data.auth_info.issuer
        except KeyError:
            satosa_logging(logger, logging.ERROR,
                           f"{self.log_prefix} Unable to determine the entityID's for the IdP or SP",
                           context.state)
            return super().process(context, data)

        self._debug(f"{self.log_prefix} entityID for the requester is {sp_entity_id}", context)
        self._debug(f"{self.log_prefix} entityID for the source IdP is {idp_entity_id}", context)

        try:
            sbs_api_user = self.config["sbs_api_user"]
            sbs_api_password = self.config["sbs_api_password"]
            sbs_api_base_url = self.config["sbs_api_base_url"]
            sbs_blacklist = self.config.get("sbs_blacklist") or []
        except KeyError as err:
            satosa_logging(logger, logging.ERROR,
                           f"{self.log_prefix} Configuration {err} is missing",
                           context.state)
            return super().process(context, data)

        if sp_entity_id in sbs_blacklist:
            satosa_logging(logger, logging.DEBUG,
                           f"{self.log_prefix} Skipping lookup for {sp_entity_id}",
                           context.state)
            return super().process(context, data)

        res = requests.get(f"{sbs_api_base_url}api/users/attributes",
                           params={"service_entity_id": sp_entity_id, "uid": data.user_id},
                           auth=(sbs_api_user, sbs_api_password))
        if res.status_code != 200:
            satosa_logging(logger, logging.ERROR,
                           f"{self.log_prefix} Error response {res.status_code} from SBS",
                           context.state)
            return super().process(context, data)

        json_response = res.json()
        self._debug(f"{self.log_prefix} Response from SBS: {json_response}", context)

        internal = self.converter.to_internal(self.attribute_profile, json_response)
        data.attributes.update(internal)

        self._debug(f"{self.log_prefix} returning data.attributes {data.attributes}", context)
        return super().process(context, data)
class ScimAttributes(ResponseMicroService):
    """
    Add attributes from the scim db to the responses.
    """

    def __init__(self, config: Mapping[str, Any], internal_attributes: Dict[str, Any], *args, **kwargs):
        """Set up eduID/SCIM database connections and the attribute converter."""
        super().__init__(*args, **kwargs)
        self.config = Config(**config)
        # Setup databases
        self.eduid_userdb = UserDB(db_uri=self.config.mongo_uri, db_name='eduid_scimapi')
        logger.info(f'Connected to eduid db: {self.eduid_userdb}')
        # TODO: Implement real 'data owner' to database lookup
        data_owner = 'eduid.se'
        _owner = data_owner.replace('.', '_')  # replace dots with underscores
        coll = f'{_owner}__users'
        # TODO: rename old collection and remove this
        if data_owner == 'eduid.se':
            coll = 'profiles'
        self._userdbs = {
            'eduid.se': ScimApiUserDB(db_uri=self.config.mongo_uri, collection=coll)
        }
        self.converter = AttributeMapper(internal_attributes)
        # Get the internal attribute name for the eduPersonPrincipalName that will be
        # used to find users in the SCIM database
        _int = self.converter.to_internal(
            'saml', {'eduPersonPrincipalName': 'something'})
        self.ext_id_attr = list(_int.keys())[0]
        logger.debug(
            f'SCIM externalId internal attribute name: {self.ext_id_attr}')

    def process(
        self,
        context: satosa.context.Context,
        data: satosa.internal.InternalData,
    ) -> satosa.internal.InternalData:
        """Overlay attributes from the user's SCIM profile onto the response data."""
        logger.debug(f'Data as dict:\n{pprint.pformat(data.to_dict())}')
        user = self._get_user(data)
        if user:
            # TODO: handle multiple profiles beyond just picking the first one
            profiles = user.profiles.keys()
            if profiles:
                profile_name = sorted(profiles)[0]
                logger.info(
                    f'Applying attributes from SCIM user {user.scim_id}, profile {profile_name}'
                )
                profile = user.profiles[profile_name]
                update = self.converter.to_internal('saml', profile.attributes)
                # FIX: the loop variable used to be called _name, shadowing the
                # profile-name variable above; renamed for clarity.
                for attr_name, _new in update.items():
                    _old = data.attributes.get(attr_name)
                    if _old != _new:
                        logger.debug(
                            f'Changing attribute {attr_name} from {repr(_old)} to {repr(_new)}'
                        )
                        data.attributes[attr_name] = _new
        return super().process(context, data)

    def _get_user(self, data: satosa.internal.InternalData) -> Optional[ScimApiUser]:
        """Locate the ScimApiUser for *data*, or None if the data owner,
        the userdb or the external id cannot be resolved."""
        data_owner = self.config.idp_to_data_owner.get(data.auth_info.issuer)
        logger.debug(
            f'Data owner for IdP {data.auth_info.issuer}: {data_owner}')
        if not data_owner:
            return None
        userdb = self._userdbs.get(data_owner)
        if not userdb:
            logger.error(f'Found no userdb for data owner {data_owner}')
            return None
        _ext_ids = data.attributes.get(self.ext_id_attr, [])
        if _ext_ids:
            ext_id = _ext_ids[0]
            user = userdb.get_user_by_external_id(ext_id)
            if user:
                logger.info(
                    f'Found SCIM user {user.scim_id} using {self.ext_id_attr} {ext_id} (data owner: {data_owner})'
                )
            else:
                logger.info(f'No user found using {self.ext_id_attr} {ext_id}')
            return user
        return None