def load_by_dic(self, dictionnary):
    """Load attribute data into this profile from a per-source dictionary.

    Dictionnary format::

        attributes = dict()
        data_from_source = list()
        a1 = dict()
        a1['oid'] = definition_name
        # Or
        a1['definition'] = definition_name
        #   definition may be the definition name like 'gn'
        #   or an alias like 'givenName'
        # Or
        a1['name'] = attribute_name_in_ns
        a1['namespace'] = ns_name
        a1['values'] = list_of_values
        data_from_source.append(a1)
        ...
        data_from_source.append(a2)
        attributes[source_name] = data_from_source

    The definition is resolved from the 'oid' key first, then from the
    'definition' key (a definition name or an alias), and finally from
    the ('name', 'namespace') pair.

    Returns 0 on success, -1 when *dictionnary* is empty.
    """
    if not dictionnary:
        logger.info("load_by_dic: empty dictionnary provided")
        return -1
    for source_name in dictionnary:
        logger.debug("load_by_dic: loading from source with name: %s"
                     % source_name)
        source = get_source_from_name(source_name)
        if not source:
            # An unknown source is reported but does not abort the load
            # of the other sources.
            logger.critical(
                "load_by_dic: The source with name %s providing "
                "attributes %s is unknown of the system"
                % (str(source_name), str(dictionnary[source_name])))
            continue
        logger.debug("load_by_dic: attributes: %s"
                     % str(dictionnary[source_name]))
        for attribute in dictionnary[source_name]:
            has_identifier = ("oid" in attribute
                              or "definition" in attribute
                              or ("name" in attribute
                                  and "namespace" in attribute))
            if not has_identifier or "values" not in attribute:
                logger.warning("load_by_dic: missing data to treat %s"
                               % str(attribute))
                continue
            # Resolve the attribute definition, most specific key first.
            if "oid" in attribute:
                definition = get_def_name_from_oid(attribute["oid"])
            elif "definition" in attribute:
                if attribute["definition"] in ATTRIBUTE_MAPPING:
                    definition = attribute["definition"]
                else:
                    definition = get_def_name_from_alias(
                        attribute["definition"])
            else:
                definition = get_def_name_from_name_and_ns_of_attribute(
                    attribute["name"], attribute["namespace"])
            if not definition:
                logger.warning("load_by_dic: unable to find definition "
                               "for %s" % str(attribute))
                continue
            logger.debug("load_by_dic: definition %s found" % definition)
            expiration_date = None
            if "expiration_date" in attribute:
                logger.debug("load_by_dic: expire at %s"
                             % attribute["expiration_date"])
                try:
                    # Validate the format before accepting the date.
                    iso8601_to_datetime(attribute["expiration_date"])
                    expiration_date = attribute["expiration_date"]
                    logger.debug("load_by_dic: expiration date has "
                                 "the ISO8601 format")
                except Exception:
                    logger.warning("load_by_dic: expiration date has "
                                   "not the ISO8601 format")
            if not expiration_date:
                # No valid date provided: default to "now".
                expiration_date = datetime.datetime.now().isoformat()
            # Keep only the values that convert to the definition type.
            values = [value for value in attribute["values"]
                      if convert_from_string(definition, value)]
            added = self.add_data(
                AttributeData(definition, values=values, source=source,
                              expiration_date=expiration_date))
            if added == 0:
                logger.debug("load_by_dic: attribute successfully added")
            else:
                logger.warning("load_by_dic: error adding attribute")
    return 0
def load_by_dic(self, dictionnary):
    '''Load attribute data into this profile from a per-source dictionary.

    Dictionnary format::

        attributes = dict()
        data_from_source = list()
        a1 = dict()
        a1['oid'] = definition_name
        # Or
        a1['definition'] = definition_name
        #   definition may be the definition name like 'gn'
        #   or an alias like 'givenName'
        # Or
        a1['name'] = attribute_name_in_ns
        a1['namespace'] = ns_name
        a1['values'] = list_of_values
        data_from_source.append(a1)
        ...
        data_from_source.append(a2)
        attributes[source_name] = data_from_source

    The definition is resolved from the 'oid' key first, then from the
    'definition' key (a definition name or an alias), and finally from
    the ('name', 'namespace') pair.

    Returns 0 on success, -1 when *dictionnary* is empty.
    '''
    if not dictionnary:
        logger.error('load_by_dic: Missing profile or dictionnary')
        return -1
    for source_name in dictionnary:
        logger.debug('load_by_dic: loading from source with name: %s'
                     % source_name)
        source = get_source_from_name(source_name)
        if not source:
            # An unknown source is reported but does not abort the load
            # of the other sources.
            logger.critical(
                'load_by_dic: The source with name %s providing '
                'attributes %s is unknown of the system'
                % (str(source_name), str(dictionnary[source_name])))
            continue
        logger.debug('load_by_dic: attributes: %s'
                     % str(dictionnary[source_name]))
        for attribute in dictionnary[source_name]:
            has_identifier = ('oid' in attribute
                              or 'definition' in attribute
                              or ('name' in attribute
                                  and 'namespace' in attribute))
            if not has_identifier or 'values' not in attribute:
                logger.warning('load_by_dic: missing data to treat %s'
                               % str(attribute))
                continue
            # Resolve the attribute definition, most specific key first.
            if 'oid' in attribute:
                definition = get_def_name_from_oid(attribute['oid'])
            elif 'definition' in attribute:
                if attribute['definition'] in ATTRIBUTE_MAPPING:
                    definition = attribute['definition']
                else:
                    definition = get_def_name_from_alias(
                        attribute['definition'])
            else:
                definition = get_def_name_from_name_and_ns_of_attribute(
                    attribute['name'], attribute['namespace'])
            if not definition:
                logger.warning('load_by_dic: unable to find definition '
                               'for %s' % str(attribute))
                continue
            logger.debug('load_by_dic: definition %s found' % definition)
            expiration_date = None
            if 'expiration_date' in attribute:
                logger.debug('load_by_dic: expire at %s'
                             % attribute['expiration_date'])
                try:
                    # Validate the format before accepting the date.
                    iso8601_to_datetime(attribute['expiration_date'])
                    expiration_date = attribute['expiration_date']
                    logger.debug('load_by_dic: expiration date has '
                                 'the ISO8601 format')
                except Exception:
                    logger.warning('load_by_dic: expiration date has '
                                   'not the ISO8601 format')
            if not expiration_date:
                # No valid date provided: default to "now".
                expiration_date = datetime.datetime.now().isoformat()
            # Keep only the values that convert to the definition type.
            values = [value for value in attribute['values']
                      if convert_from_string(definition, value)]
            added = self.add_data(
                AttributeData(definition, values=values, source=source,
                              expiration_date=expiration_date))
            if added == 0:
                logger.debug('load_by_dic: attribute successfully added')
            else:
                logger.warning('load_by_dic: error adding attribute')
    return 0
def provide_attributes_at_sso(request, user, audience, **kwargs):
    '''Provide attributes to a service provider at SSO login.

    This function is called by a service provider asynchronous binding at
    sso login. The call is made by the signal add_attributes_to_response.
    In parameter, the service provider id and the user authenticated.

    Returns a dict with an 'attributes' key, or None when the user, the
    audience or the attribute policy is missing.
    '''
    if not user or not audience:
        return None
    logger.debug('provide_attributes_at_sso: search attribute for %s'
                 % user)
    logger.debug('provide_attributes_at_sso: attributes for %s' % audience)
    provider = None
    try:
        provider = LibertyProvider.objects.get(entity_id=audience)
    except LibertyProvider.DoesNotExist:
        # A missing provider is not fatal: the policy lookup below may
        # still return a default policy.
        logger.debug('provide_attributes_at_sso: Provider with name %s '
                     'not found' % audience)
    attribute_policy = get_attribute_policy(provider)
    if not attribute_policy:
        logger.debug('provide_attributes_at_sso: no attribute policy '
                     'found for %s' % audience)
        return None
    p = load_or_create_user_profile(user=user)
    if not p:
        logger.error('provide_attributes_at_sso: unable to load or '
                     'create a profile for %s' % user)
        return None
    logger.debug('provide_attributes_at_sso: profile loaded %s' % p)
    # Whether a missing required attribute must abort attribute delivery.
    require_error = \
        attribute_policy.send_error_and_no_attrs_if_missing_required_attrs
    # Returned dictionnary.
    dic = dict()
    attributes = dict()
    # If profile already filled in that session, we should offer to skip
    # filling.
    list_pull = attribute_policy.attribute_list_for_sso_from_pull_sources
    if not list_pull:
        logger.debug('provide_attributes_at_sso: no attribute list found '
                     'from pull source')
    else:
        logger.debug('provide_attributes_at_sso: found attribute list '
                     'named %s' % list_pull.name)
        listed = list_pull.attributes.all()
        if not listed:
            logger.debug('provide_attributes_at_sso: The list is empty')
        else:
            logger.debug('provide_attributes_at_sso: the list contains %s'
                         % [a for a in listed])
            logger.debug('provide_attributes_at_sso: load in profile %s'
                         % [a.attribute_name for a in listed])
            # Attributes without an explicit source may come from any
            # available source.
            p.load_listed_attributes(
                [a.attribute_name for a in listed if not a.source])
            # Group the remaining attribute names by their source.
            by_source = dict()
            for a in listed:
                if a.source:
                    if a.source in by_source:
                        by_source[a.source].append(a.attribute_name)
                    else:
                        by_source[a.source] = [a.attribute_name]
            for source, defs in by_source.items():
                # PROCESSING is a pseudo-source handled by p.process()
                # below, not an actual attribute backend.
                if source.name == 'PROCESSING':
                    continue
                # AUTH_BACKEND attributes come from the authentication
                # backend itself.
                auth_source = source.name == 'AUTH_BACKEND'
                p.load_listed_attributes_with_source(
                    defs, source, auth_source=auth_source)
            logger.debug('provide_attributes_at_sso: process profile')
            context = ('sso', {'audience': audience})
            p.process(context)
            for a in listed:
                if a.source:
                    logger.debug('provide_attributes_at_sso: %s must be '
                                 'provided by %s'
                                 % (a.attribute_name, a.source))
                    data = p.get_data_of_definition_and_source(
                        a.attribute_name, a.source)
                else:
                    data = p.get_data_of_definition(a.attribute_name)
                if not data:
                    logger.debug('provide_attributes_at_sso: %s not found'
                                 % a.attribute_name)
                    if require_error and a.required:
                        raise Exception('Missing a required attribute')
                else:
                    logger.debug('provide_attributes_at_sso: found %s'
                                 % [x.__unicode__() for x in data])
                    # First entry is used; presumably the freshest —
                    # TODO confirm the ordering guarantee of
                    # get_data_of_definition*().
                    d = data[0]
                    try:
                        add_data_to_dic(
                            attributes, a.attribute_name, d.get_values(),
                            a.output_name_format, a.output_namespace,
                            # Send error if required by policy and the
                            # attribute is required.
                            (require_error and a.required))
                    except Exception:
                        # Missing required attribute.
                        pass
            logger.debug('provide_attributes_at_sso: attributes returned '
                         'from pull source %s' % str(attributes))
    if attribute_policy.forward_attributes_from_push_sources \
            and request and request.session \
            and 'multisource_attributes' in request.session:
        # Treat attributes pushed in the session by other providers.
        logger.debug('provide_attributes_at_sso: attributes is session '
                     'are %s' % str(request.session['multisource_attributes']))
        attrs = {}
        sources = \
            attribute_policy.source_filter_for_sso_from_push_sources.all()
        s_names = []
        if sources:
            s_names = [s.name for s in sources]
            logger.debug('provide_attributes_at_sso: filter attributes '
                         'from push source, sources accepted are %s'
                         % str(s_names))
        # Keep tokens from accepted sources only, or from every source
        # when no filter is configured.
        for entity_id, tokens in \
                request.session['multisource_attributes'].items():
            if sources and entity_id not in s_names:
                continue
            for token in tokens:
                if 'attributes' in token:
                    logger.debug('provide_attributes_at_sso: keep in '
                                 'dic %s'
                                 % str({entity_id: token['attributes']}))
                    attrs.update({entity_id: token['attributes']})
        logger.debug('provide_attributes_at_sso: attributes are %s'
                     % str(attrs))
        if not attribute_policy.map_attributes_from_push_sources \
                and not \
                attribute_policy.attribute_filter_for_sso_from_push_sources:
            # TODO: Load in profile if possible
            for vals in attrs.values():
                attributes.update(vals)
        else:
            dic_to_load_in_profile = dict()
            definitions = list()
            for entity_id, attrs_list in attrs.items():
                source = None
                if attribute_policy.map_attributes_from_push_sources:
                    # Find the attribute source: first by its own name,
                    # then through the provider with the same entity id.
                    try:
                        source = AttributeSource.objects.get(name=entity_id)
                    except AttributeSource.DoesNotExist:
                        try:
                            lp = LibertyProvider.objects.get(
                                entity_id=entity_id)
                            source = AttributeSource.objects.get(
                                name=lp.name)
                        except (LibertyProvider.DoesNotExist,
                                AttributeSource.DoesNotExist):
                            pass
                namespace_in = 'Default'
                if not source:
                    logger.debug('provide_attributes_at_sso: Not '
                                 'attribute source found for %s'
                                 % str(attributes))
                else:
                    logger.debug('provide_attributes_at_sso: Source '
                                 'found %s' % source.name)
                    namespace_in = source.namespace
                logger.debug('provide_attributes_at_sso: input namespace '
                             'is %s' % namespace_in)
                dic_to_load_in_profile[entity_id] = list()
                for key, values in attrs_list.items():
                    logger.debug('provide_attributes_at_sso: treat %s'
                                 % str(attrs))
                    # key may be a (name, format, friendly-name) tuple,
                    # a (name, format) tuple, or a plain name.  A plain
                    # assignment cannot fail, so the original "unknown
                    # format" branch was unreachable and is dropped.
                    try:
                        name, format, fname = key
                    except (TypeError, ValueError):
                        try:
                            name, format = key
                        except (TypeError, ValueError):
                            name = key
                    logger.debug('provide_attributes_at_sso: attribute '
                                 'with name %s' % str(name))
                    if namespace_in == 'Default':
                        # Default namespace: resolve by mapping, OID urn
                        # or alias.
                        definition = None
                        if name in ATTRIBUTE_MAPPING:
                            definition = name
                        else:
                            definition = get_def_name_from_oid(
                                urn_to_oid(name))
                            if not definition:
                                definition = get_definition_from_alias(
                                    name)
                        if definition:
                            logger.debug('provide_attributes_at_sso: '
                                         'found definition %s'
                                         % definition)
                            definitions.append(definition)
                            dic_to_load_in_profile[entity_id].append(
                                {'definition': definition,
                                 'values': values})
                    else:
                        definition = \
                            get_def_name_from_name_and_ns_of_attribute(
                                name, namespace_in)
                        if definition:
                            logger.debug('provide_attributes_at_sso: '
                                         'found definition %s'
                                         % definition)
                            definitions.append(definition)
                            dic_to_load_in_profile[entity_id].append(
                                {'name': name,
                                 'namespace': namespace_in,
                                 'values': values})
            # Load in profile to deal with input mapping.
            logger.debug('provide_attributes_at_sso: load in profile %s'
                         % str(dic_to_load_in_profile))
            p.load_by_dic(dic_to_load_in_profile)
            if attribute_policy.attribute_filter_for_sso_from_push_sources:
                att_l = attribute_policy.\
                    attribute_filter_for_sso_from_push_sources.\
                    attributes.all()
                if att_l:
                    for att in att_l:
                        d = None
                        if attribute_policy.\
                                filter_source_of_filtered_attributes \
                                and att.source:
                            d = p.get_data_of_definition_and_source(
                                att.attribute_name, att.source)
                            if d:
                                d = d[0]
                        else:
                            d = p.get_freshest_data_of_definition(
                                att.attribute_name)
                        if not d:
                            continue
                        namespace_out = 'Default'
                        name_format_out = \
                            lasso.SAML2_ATTRIBUTE_NAME_FORMAT_URI
                        if attribute_policy.\
                                map_attributes_of_filtered_attributes:
                            namespace_out = att.output_namespace
                            name_format_out = att.output_name_format
                        elif attribute_policy.\
                                map_attributes_from_push_sources:
                            namespace_out = \
                                attribute_policy.output_namespace
                            # NOTE(review): the unfiltered branch below
                            # reads output_name_format here — confirm
                            # attribute_name_format is intended.
                            name_format_out = \
                                attribute_policy.attribute_name_format
                        logger.debug('provide_attributes_at_sso: output '
                                     'namespace %s' % namespace_out)
                        logger.debug('provide_attributes_at_sso: output '
                                     'format %s' % name_format_out)
                        add_data_to_dic(
                            attributes, d.definition, d.get_values(),
                            name_format_out, namespace_out,
                            (require_error and att.required))
            else:
                namespace_out = 'Default'
                name_format_out = lasso.SAML2_ATTRIBUTE_NAME_FORMAT_URI
                if attribute_policy.map_attributes_from_push_sources:
                    namespace_out = attribute_policy.output_namespace
                    name_format_out = attribute_policy.output_name_format
                for definition in definitions:
                    data = p.get_freshest_data_of_definition(definition)
                    if data:
                        add_data_to_dic(
                            attributes, data.definition,
                            data.get_values(), name_format_out,
                            namespace_out)
    logger.debug('provide_attributes_at_sso: attributes returned are %s'
                 % str(attributes))
    dic['attributes'] = attributes
    return dic
def provide_attributes_at_sso(request, user, audience, **kwargs):
    '''Provide attributes to a service provider at SSO login.

    This function is called by a service provider asynchronous binding at
    sso login. The call is made by the signal add_attributes_to_response.
    In parameter, the service provider id and the user authenticated.

    Returns a dict with an 'attributes' key, or None when the user, the
    audience or the attribute policy is missing.
    '''
    if not user or not audience:
        return None
    logger.debug('provide_attributes_at_sso: search attribute for %s'
                 % user)
    logger.debug('provide_attributes_at_sso: attributes for %s' % audience)
    provider = None
    try:
        provider = LibertyProvider.objects.get(entity_id=audience)
    except LibertyProvider.DoesNotExist:
        # A missing provider is not fatal: the policy lookup below may
        # still return a default policy.
        logger.debug('provide_attributes_at_sso: Provider with name %s '
                     'not found' % audience)
    attribute_policy = get_attribute_policy(provider)
    if not attribute_policy:
        logger.debug('provide_attributes_at_sso: no attribute policy '
                     'found for %s' % audience)
        return None
    p = load_or_create_user_profile(user=user)
    if not p:
        logger.error('provide_attributes_at_sso: unable to load or '
                     'create a profile for %s' % user)
        return None
    logger.debug('provide_attributes_at_sso: profile loaded %s' % p)
    # Whether a missing required attribute must abort attribute delivery.
    require_error = \
        attribute_policy.send_error_and_no_attrs_if_missing_required_attrs
    # Returned dictionnary.
    dic = dict()
    attributes = dict()
    list_pull = attribute_policy.attribute_list_for_sso_from_pull_sources
    if not list_pull:
        logger.debug('provide_attributes_at_sso: no attribute list found '
                     'from pull source')
    else:
        logger.debug('provide_attributes_at_sso: found attribute list '
                     'named %s' % list_pull.name)
        listed = list_pull.attributes.all()
        if not listed:
            logger.debug('provide_attributes_at_sso: The list is empty')
        else:
            logger.debug('provide_attributes_at_sso: the list contains %s'
                         % [a for a in listed])
            logger.debug('provide_attributes_at_sso: load in profile %s'
                         % [a.attribute_name for a in listed])
            # Attributes without an explicit source may come from any
            # available source.
            p.load_listed_attributes(
                [a.attribute_name for a in listed if not a.source])
            # Group the remaining attribute names by their source.
            by_source = dict()
            for a in listed:
                if a.source:
                    if a.source in by_source:
                        by_source[a.source].append(a.attribute_name)
                    else:
                        by_source[a.source] = [a.attribute_name]
            for source, defs in by_source.items():
                p.load_listed_attributes_with_source(defs, source)
            for a in listed:
                if a.source:
                    logger.debug('provide_attributes_at_sso: %s must be '
                                 'provided by %s'
                                 % (a.attribute_name, a.source))
                    data = p.get_data_of_definition_and_source(
                        a.attribute_name, a.source)
                else:
                    data = p.get_data_of_definition(a.attribute_name)
                if not data:
                    logger.debug('provide_attributes_at_sso: %s not found'
                                 % a.attribute_name)
                    if require_error and a.required:
                        raise Exception('Missing a required attribute')
                else:
                    logger.debug('provide_attributes_at_sso: found %s'
                                 % [x.__unicode__() for x in data])
                    # First entry is used; presumably the freshest —
                    # TODO confirm the ordering guarantee of
                    # get_data_of_definition*().
                    d = data[0]
                    try:
                        add_data_to_dic(
                            attributes, a.attribute_name, d.get_values(),
                            a.output_name_format, a.output_namespace,
                            # Send error if required by policy and the
                            # attribute is required.
                            (require_error and a.required))
                    except Exception:
                        # Missing required attribute.
                        pass
            logger.debug('provide_attributes_at_sso: attributes returned '
                         'from pull source %s' % str(attributes))
    if attribute_policy.forward_attributes_from_push_sources \
            and request and request.session \
            and 'multisource_attributes' in request.session:
        # Treat attributes pushed in the session by other providers.
        logger.debug('provide_attributes_at_sso: attributes is session '
                     'are %s' % str(request.session['multisource_attributes']))
        attrs = {}
        sources = \
            attribute_policy.source_filter_for_sso_from_push_sources.all()
        s_names = []
        if sources:
            s_names = [s.name for s in sources]
            logger.debug('provide_attributes_at_sso: filter attributes '
                         'from push source, sources accepted are %s'
                         % str(s_names))
        # Keep tokens from accepted sources only, or from every source
        # when no filter is configured.
        for entity_id, tokens in \
                request.session['multisource_attributes'].items():
            if sources and entity_id not in s_names:
                continue
            for token in tokens:
                if 'attributes' in token:
                    logger.debug('provide_attributes_at_sso: keep in '
                                 'dic %s'
                                 % str({entity_id: token['attributes']}))
                    attrs.update({entity_id: token['attributes']})
        logger.debug('provide_attributes_at_sso: attributes are %s'
                     % str(attrs))
        if not attribute_policy.map_attributes_from_push_sources \
                and not \
                attribute_policy.attribute_filter_for_sso_from_push_sources:
            # TODO: Load in profile if possible
            for vals in attrs.values():
                attributes.update(vals)
        else:
            dic_to_load_in_profile = dict()
            definitions = list()
            for entity_id, attrs_list in attrs.items():
                source = None
                if attribute_policy.map_attributes_from_push_sources:
                    # Find the attribute source: first by its own name,
                    # then through the provider with the same entity id.
                    try:
                        source = AttributeSource.objects.get(name=entity_id)
                    except AttributeSource.DoesNotExist:
                        try:
                            lp = LibertyProvider.objects.get(
                                entity_id=entity_id)
                            source = AttributeSource.objects.get(
                                name=lp.name)
                        except (LibertyProvider.DoesNotExist,
                                AttributeSource.DoesNotExist):
                            pass
                namespace_in = 'Default'
                if not source:
                    logger.debug('provide_attributes_at_sso: Not '
                                 'attribute source found for %s'
                                 % str(attributes))
                else:
                    logger.debug('provide_attributes_at_sso: Source '
                                 'found %s' % source.name)
                    namespace_in = source.namespace
                logger.debug('provide_attributes_at_sso: input namespace '
                             'is %s' % namespace_in)
                dic_to_load_in_profile[entity_id] = list()
                for key, values in attrs_list.items():
                    logger.debug('provide_attributes_at_sso: treat %s'
                                 % str(attrs))
                    # key may be a (name, format, friendly-name) tuple,
                    # a (name, format) tuple, or a plain name.  A plain
                    # assignment cannot fail, so the original "unknown
                    # format" branch was unreachable and is dropped.
                    try:
                        name, format, fname = key
                    except (TypeError, ValueError):
                        try:
                            name, format = key
                        except (TypeError, ValueError):
                            name = key
                    logger.debug('provide_attributes_at_sso: attribute '
                                 'with name %s' % str(name))
                    if namespace_in == 'Default':
                        # Default namespace: resolve by mapping, OID urn
                        # or alias.
                        definition = None
                        if name in ATTRIBUTE_MAPPING:
                            definition = name
                        else:
                            definition = get_def_name_from_oid(
                                urn_to_oid(name))
                            if not definition:
                                definition = get_definition_from_alias(
                                    name)
                        if definition:
                            logger.debug('provide_attributes_at_sso: '
                                         'found definition %s'
                                         % definition)
                            definitions.append(definition)
                            dic_to_load_in_profile[entity_id].append(
                                {'definition': definition,
                                 'values': values})
                    else:
                        definition = \
                            get_def_name_from_name_and_ns_of_attribute(
                                name, namespace_in)
                        if definition:
                            logger.debug('provide_attributes_at_sso: '
                                         'found definition %s'
                                         % definition)
                            definitions.append(definition)
                            dic_to_load_in_profile[entity_id].append(
                                {'name': name,
                                 'namespace': namespace_in,
                                 'values': values})
            # Load in profile to deal with input mapping.
            logger.debug('provide_attributes_at_sso: load in profile %s'
                         % str(dic_to_load_in_profile))
            p.load_by_dic(dic_to_load_in_profile)
            if attribute_policy.attribute_filter_for_sso_from_push_sources:
                att_l = attribute_policy.\
                    attribute_filter_for_sso_from_push_sources.\
                    attributes.all()
                if att_l:
                    for att in att_l:
                        d = None
                        if attribute_policy.\
                                filter_source_of_filtered_attributes \
                                and att.source:
                            d = p.get_data_of_definition_and_source(
                                att.attribute_name, att.source)
                            if d:
                                d = d[0]
                        else:
                            d = p.get_freshest_data_of_definition(
                                att.attribute_name)
                        if not d:
                            continue
                        namespace_out = 'Default'
                        name_format_out = \
                            lasso.SAML2_ATTRIBUTE_NAME_FORMAT_URI
                        if attribute_policy.\
                                map_attributes_of_filtered_attributes:
                            namespace_out = att.output_namespace
                            name_format_out = att.output_name_format
                        elif attribute_policy.\
                                map_attributes_from_push_sources:
                            namespace_out = \
                                attribute_policy.output_namespace
                            # NOTE(review): the unfiltered branch below
                            # reads output_name_format here — confirm
                            # attribute_name_format is intended.
                            name_format_out = \
                                attribute_policy.attribute_name_format
                        logger.debug('provide_attributes_at_sso: output '
                                     'namespace %s' % namespace_out)
                        logger.debug('provide_attributes_at_sso: output '
                                     'format %s' % name_format_out)
                        add_data_to_dic(
                            attributes, d.definition, d.get_values(),
                            name_format_out, namespace_out,
                            (require_error and att.required))
            else:
                namespace_out = 'Default'
                name_format_out = lasso.SAML2_ATTRIBUTE_NAME_FORMAT_URI
                if attribute_policy.map_attributes_from_push_sources:
                    namespace_out = attribute_policy.output_namespace
                    name_format_out = attribute_policy.output_name_format
                for definition in definitions:
                    data = p.get_freshest_data_of_definition(definition)
                    if data:
                        add_data_to_dic(
                            attributes, data.definition,
                            data.get_values(), name_format_out,
                            namespace_out)
    logger.debug('provide_attributes_at_sso: attributes returned are %s'
                 % str(attributes))
    dic['attributes'] = attributes
    return dic