def _validate_subject_dn_data(self, subject_dn):
    """Confirm that the subject_dn contains valid data

    Validate that the subject_dn string parses without error.
    If not, raise InvalidSubjectDN.
    """
    try:
        parse_dn(subject_dn)
    except ldap_exceptions.LDAPInvalidDnError:
        raise exception.InvalidSubjectDN(subject_dn=subject_dn)
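# A minimal sketch (assuming the ldap3 library) of what parse_dn returns and
# how LDAPInvalidDnError surfaces for a malformed DN; the sample DN below is
# illustrative only.
from ldap3.utils.dn import parse_dn
from ldap3.core.exceptions import LDAPInvalidDnError

print(parse_dn("cn=Jane Doe,ou=People,dc=example,dc=com"))
# [('cn', 'Jane Doe', ','), ('ou', 'People', ','),
#  ('dc', 'example', ','), ('dc', 'com', '')]

try:
    parse_dn("not a dn")
except LDAPInvalidDnError as exc:
    print("rejected:", exc)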
def authenticate(self, username, password, attribute=None, base_dn=None,
                 search_filter=None, search_scope=SUBTREE):
    '''Attempts to bind a user to the LDAP server.

    Args:
        username (str): DN or the username to attempt to bind with.
        password (str): The password of the username.
        attribute (str): The LDAP attribute for the username.
        base_dn (str): The LDAP basedn to search on.
        search_filter (str): LDAP searchfilter to attempt the user
            search with.

    Returns:
        bool: ``True`` if successful or ``False`` if the credentials
            are invalid.
    '''
    # If the username is not a valid DN we can bind with, we need to find
    # the user first.
    valid_dn = False
    try:
        parse_dn(username)
        valid_dn = True
    except LDAPInvalidDnError:
        pass

    if valid_dn is False:
        user_filter = '({0}={1})'.format(attribute, username)
        if search_filter is not None:
            user_filter = '(&{0}{1})'.format(user_filter, search_filter)
        try:
            self.connection.search(base_dn, user_filter, search_scope,
                                   attributes=[attribute])
            response = self.connection.response
            username = response[0]['dn']
        except (LDAPInvalidDnError, LDAPInvalidFilterError, IndexError):
            return False

    try:
        conn = self.connect(username, password)
        conn.unbind()
        return True
    except LDAPBindError:
        return False
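# Illustrative only: how the user_filter above is composed when the supplied
# username is not a DN; attribute, username and search_filter are sample values.
attribute, username, search_filter = 'uid', 'jdoe', '(objectClass=person)'
user_filter = '({0}={1})'.format(attribute, username)
if search_filter is not None:
    user_filter = '(&{0}{1})'.format(user_filter, search_filter)
print(user_filter)  # (&(uid=jdoe)(objectClass=person))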
def doc_id_from_dn(dn):
    parsed_dn = dnutils.parse_dn(dn)
    doc_id = parsed_dn[0][1]

    if doc_id == "jans":
        doc_id = "_"

    return doc_id
def _handle_move_object(self, param):
    """ Moves an arbitrary object within the directory to a new OU. """
    action_result = self.add_action_result(ActionResult(dict(param)))
    summary = action_result.update_summary({})
    ar_data = {}

    obj = param['object']
    new_ou = param['new_ou']

    if not self._ldap_bind():
        return RetVal(action_result.set_status(phantom.APP_ERROR))

    try:
        cn = '='.join(parse_dn(obj)[0][:-1])
        res = self._ldap_connection.modify_dn(obj, cn, new_superior=new_ou)
        if not res:
            return RetVal(
                action_result.set_status(
                    phantom.APP_ERROR,
                    self._ldap_connection.result,
                ))
        summary["moved"] = True
        ar_data["source_object"] = obj
        ar_data["destination_container"] = new_ou
    except Exception as e:
        return RetVal(
            action_result.set_status(phantom.APP_ERROR, "", exception=e))
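# Illustrative: the relative DN handed to modify_dn above is rebuilt from the
# first RDN of the object's DN (attribute and value, separator dropped); the
# DNs below are sample values.
from ldap3.utils.dn import parse_dn

obj = 'CN=Printer01,OU=Old,DC=example,DC=com'
rdn = '='.join(parse_dn(obj)[0][:-1])
print(rdn)  # CN=Printer01
# self._ldap_connection.modify_dn(obj, rdn, new_superior='OU=New,DC=example,DC=com')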
def test_module(self):
    """ Basic test connection and validation of the Ldap integration. """
    build_number = get_demisto_version().get('buildNumber', LdapClient.DEV_BUILD_NUMBER)
    self._get_formatted_custom_attributes()

    if build_number != LdapClient.DEV_BUILD_NUMBER \
            and LdapClient.SUPPORTED_BUILD_NUMBER > int(build_number):
        raise Exception(f'OpenLDAP integration is supported from build number: '
                        f'{LdapClient.SUPPORTED_BUILD_NUMBER}')

    try:
        parse_dn(self._username)
    except LDAPInvalidDnError:
        raise Exception("Invalid credentials input. Credentials must be full DN.")

    self.authenticate_ldap_user(username=self._username, password=self._password)
    demisto.results('ok')
def user_get_groups(attributes):
    """Retrieve the group membership

    :param attributes: LDAP attributes for user
    :returns: List -- A list of groups that the user is a member of
    """
    groups = attributes.get('memberOf', [])
    group_dns = [dn.parse_dn(g) for g in groups]
    return [x[0][1] for x in group_dns if x[1] == ('OU', 'Groups', ',')]
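# Illustrative: the membership filter above keeps only DNs whose second RDN is
# literally OU=Groups; the comparison is case- and separator-sensitive, so it
# depends on the exact form the server returns. Sample data below.
from ldap3.utils.dn import parse_dn

groups = [
    'CN=Admins,OU=Groups,DC=example,DC=com',
    'CN=Everyone,OU=Builtin,DC=example,DC=com',
]
group_dns = [parse_dn(g) for g in groups]
print([x[0][1] for x in group_dns if x[1] == ('OU', 'Groups', ',')])
# ['Admins']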
def get_rootdn(self, dn):
    dn_parsed = dnutils.parse_dn(dn)
    dn_parsed.pop(0)
    dnl = []
    for dnp in dn_parsed:
        dnl.append('='.join(dnp[:2]))

    return ','.join(dnl)
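# Illustrative sketch of get_rootdn's effect (assuming dnutils is
# ldap3.utils.dn): drop the first RDN and re-join the rest as the parent DN.
from ldap3.utils import dn as dnutils

dn_parsed = dnutils.parse_dn('inum=XYZ,ou=clients,o=jans')
print(','.join('='.join(dnp[:2]) for dnp in dn_parsed[1:]))  # ou=clients,o=jans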
def formatGroupsHtml(self, grouplist):
    outcache = []
    for group in grouplist:
        cn = self.unescapecn(dn.parse_dn(group)[0][1])
        outcache.append(
            u'<a href="%s.html#cn_%s" title="%s">%s</a>' % (
                self.config.users_by_group,
                quote_plus(self.formatId(cn)),
                self.htmlescape(group),
                self.htmlescape(cn)))
    return ', '.join(outcache)
def formatGroupsGrep(self, grouplist):
    outcache = []
    for group in grouplist:
        try:
            cn = self.unescapecn(dn.parse_dn(group)[0][1])
        except LDAPInvalidDnError:
            # Parsing failed, do it manually
            cn = self.unescapecn(self.parseDnFallback(group))
        outcache.append(cn)
    return ', '.join(outcache)
def formatGroupsHtml(self, grouplist):
    outcache = []
    for group in grouplist:
        try:
            cn = self.unescapecn(dn.parse_dn(group)[0][1])
        except LDAPInvalidDnError:
            # Parsing failed, do it manually
            cn = self.unescapecn(self.parseDnFallback(group))
        outcache.append(
            '<a href="%s.html#cn_%s" title="%s">%s</a>' % (
                self.config.users_by_group,
                quote_plus(self.formatId(cn)),
                self.htmlescape(group),
                self.htmlescape(cn)))
    return ', '.join(outcache)
def get_ldap_attribute(entry, attribute):
    path = attribute.split('.')
    values = entry.entry_attributes_as_dict[path[0]]
    path = path[1:]
    for value in values:
        if path:
            dn = parse_dn(value)
            value = dict()
            for type_, name, _ in dn:
                names = value.setdefault(type_, [])
                names.append(name)
            logger.debug("Parsed DN: %s", value)
            value = value[path[0]][0]
        yield value
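# Hypothetical illustration of the dotted-attribute convention above:
# "manager.cn" means "parse each value of the manager attribute as a DN and
# take its first cn component". Only the DN-to-dict step is shown here.
from ldap3.utils.dn import parse_dn

value = 'cn=Alice Admin,ou=People,dc=example,dc=com'
parsed = {}
for type_, name, _ in parse_dn(value):
    parsed.setdefault(type_, []).append(name)
print(parsed['cn'][0])  # Alice Admin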
def _dn_to_path(dn):
    """Split DN into top-down normalized node names."""
    path = []
    rdn = []
    for component in parse_dn(dn):
        rdn.append(component[0] + "=" + component[1])
        if component[2] != "+":
            path.append("+".join(sorted(rdn)))
            rdn.clear()
    return reversed(path)
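# Worked example for _dn_to_path (module-level, so callable as-is); the '+'
# separator groups a multi-valued RDN into one sorted node name. Sample DN only.
print(list(_dn_to_path('cn=web01+sn=prod,ou=Servers,dc=example,dc=com')))
# ['dc=com', 'dc=example', 'ou=Servers', 'cn=web01+sn=prod']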
def get_key_from(dn):
    dns = []
    for rd in dnutils.parse_dn(dn):
        if rd[0] == 'o' and rd[1] == 'jans':
            continue
        dns.append(rd[1])

    dns.reverse()
    key = '_'.join(dns)

    if not key:
        key = '_'

    return key
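# Illustrative usage (assuming dnutils is ldap3.utils.dn): the o=jans RDN is
# dropped and the remaining RDN values are reversed into a document key.
print(get_key_from('inum=0000,ou=people,o=jans'))  # people_0000
print(get_key_from('o=jans'))                       # _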
def _is_valid_dn(dn: str, user_identifier_attribute: str) -> Tuple[bool, str]:
    """
    Validates whether given input is valid ldap DN.
    Returns flag indicator and user's identifier value from DN.
    """
    try:
        parsed_dn = parse_dn(dn, strip=False)
        for attribute_and_value in parsed_dn:
            if attribute_and_value[0].lower() == user_identifier_attribute.lower():
                return True, attribute_and_value[1]
        raise Exception(f'OpenLDAP {user_identifier_attribute} attribute was not found in user DN : {dn}')
    except LDAPInvalidDnError as e:
        demisto.debug(f'OpenLDAP failed parsing DN with error: {str(e)}. Fallback for unique id activated')
        return False, dn
    except Exception:
        raise
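# Hedged usage sketch: a well-formed DN returns the identifier value, while a
# plain username fails parsing and is passed back unchanged (the second call
# needs the demisto runtime available for the debug logging in that branch).
print(_is_valid_dn('uid=jdoe,ou=People,dc=example,dc=com', 'uid'))  # (True, 'jdoe')
print(_is_valid_dn('jdoe', 'uid'))                                  # (False, 'jdoe')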
def search(self, search_base, search_filter='(objectClass=*)', search_scope=ldap3.LEVEL):
    base.logIt("Searching database for dn {} with filter {}".format(
        search_base, search_filter))
    backend_location = self.get_backend_location_for_dn(search_base)

    if backend_location == BackendTypes.LDAP:
        if self.ldap_conn.search(search_base=search_base,
                                 search_filter=search_filter,
                                 search_scope=search_scope,
                                 attributes=['*']):
            key, document = ldif_utils.get_document_from_entry(
                self.ldap_conn.response[0]['dn'],
                self.ldap_conn.response[0]['attributes'])
            return document

    if backend_location == BackendTypes.COUCHBASE:
        key = ldif_utils.get_key_from(search_base)
        bucket = self.get_bucket_for_key(key)

        if search_scope == ldap3.BASE:
            n1ql = 'SELECT * FROM `{}` USE KEYS "{}"'.format(bucket, key)
        else:
            parsed_dn = dnutils.parse_dn(search_filter.strip('(').strip(')'))
            attr = parsed_dn[0][0]
            val = parsed_dn[0][1]

            if '*' in val:
                search_clause = 'LIKE "{}"'.format(val.replace('*', '%'))
            else:
                search_clause = '="{}"'.format(val.replace('*', '%'))

            n1ql = 'SELECT * FROM `{}` WHERE `{}` {}'.format(
                bucket, attr, search_clause)

        result = self.cbm.exec_query(n1ql)
        if result.ok:
            data = result.json()
            if data.get('results'):
                return data['results'][0][bucket]
def authenticate(self, username, password):
    user_dn = self.user_pattern.format(username)
    try:
        conn = ldap3.Connection(self._server, user_dn, password, auto_bind=True)
    except ldap3.core.exceptions.LDAPException as e:
        logger.warn('Failed to bind to LDAP: {}'.format(e))
        return False

    attrs = [self.email_attr]
    if self.require_groups:
        attrs.append(self.member_attr)
    try:
        conn.search(conn.user, '(objectClass=*)', search_scope=ldap3.BASE,
                    attributes=attrs)
    except ldap3.core.exceptions.LDAPException as e:
        logger.warn('Failed to read LDAP attributes: {}'.format(e))
        return False
    result = conn.entries[0]

    if self.require_groups:
        granted = False
        for membership in getattr(result, self.member_attr).values:
            try:
                rdns = parse_dn(membership)
            except ldap3.core.exceptions.LDAPInvalidDnError:
                group = membership
            else:
                group = rdns[0][1]
            if group.lower() in self.require_groups:
                granted = True
                break
        if not granted:
            logger.info('{} not in any required groups'.format(username))
            return False

    return {
        'username': username.lower(),
        'email': getattr(result, self.email_attr).values[0]
    }
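# Illustrative: extracting a group's CN from a memberOf value, with the same
# raw-string fallback as above when the value is not a DN. Sample value only.
import ldap3
from ldap3.utils.dn import parse_dn

membership = 'cn=platform-admins,ou=Groups,dc=example,dc=com'
try:
    rdns = parse_dn(membership)
except ldap3.core.exceptions.LDAPInvalidDnError:
    group = membership
else:
    group = rdns[0][1]
print(group.lower())  # platform-admins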
def __init__(self, DN, settings, keysize=None, sign_ca=Ellipsis, **kwargs):
    if keysize is None:
        keysize = current_app.config['RSA_KEYSIZE']
    key = rsa.generate_private_key(public_exponent=65537,
                                   key_size=keysize,
                                   backend=default_backend())
    self.private_key = key.private_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    )

    name_parts = parse_dn(DN)
    subject = x509.Name([
        x509.NameAttribute(X509_NAME_MAP[e[0]], e[1]) for e in name_parts
    ])
    extras = dict(kwargs)

    if sign_ca is Ellipsis:
        # Self-Sign
        base = _CERTIFICATE_SETTINGS_SELFSIGN(x509.CertificateBuilder(),
                                              subject=subject)
        sign_cb = lambda a: a.sign(key, hashes.SHA256(), default_backend())
        status = CertificateStatus.ACTIVE
    elif sign_ca is None:
        # CSR
        base = x509.CertificateRequestBuilder()
        sign_cb = lambda a: a.sign(key, hashes.SHA256(), default_backend())
        status = CertificateStatus.REQUEST
    else:
        base = x509.CertificateBuilder()
        sign_cb = lambda a: sign_ca.sign_certificate(
            a.serial_number(extras['serial_number']))
        status = CertificateStatus.ACTIVE

    certificate = sign_cb(settings(base, subject=subject, key=key, **extras))
    self.certificate = certificate.public_bytes(serialization.Encoding.DER)
    print(self.prettyPrint())
def dn_to_domain(dn):
    return '.'.join(i[1] for i in parse_dn(dn))
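# Worked example: joining every RDN value with dots turns a dc-style DN into a
# DNS name (sample DN only).
print(dn_to_domain('dc=corp,dc=example,dc=com'))  # corp.example.com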
            s = a
        else:
            s = mapping['attribute'][a]
        new_entry[s] = entry[a]

    new_entry['objectClass'] = []
    for oc in entry.get('objectClass', []):
        if oc in mapped_json['exclude']['objectClass']:
            continue
        nn = mapping['objectClass'].get(oc, oc)
        new_entry['objectClass'].append(nn)

    new_dn_list = []
    for dne in dnutils.parse_dn(dn):
        k = dne[0]
        for ot in mapping:
            for e in mapping[ot]:
                if dne[0] == e:
                    k = mapping[ot][e]
                    break
        new_val = [k, dne[1]]
        for dnc in dn_coversions:
            if dnc[0] == dne[0] and dnc[1] == dne[1]:
                new_val[1] = dnc[2]
        new_dn_list.append('='.join(new_val))

    new_dn = ','.join(new_dn_list)
def test_server_configuration(load_test_server_yaml) -> UDMServer:  # pragma: no cover
    """
    Get data of server used to run tests.

    :raises: BadTestServerConfig
    :raises: NoTestServerConfig
    """
    print(f"Trying to load test server config from {TEST_SERVER_YAML_FILENAME}...")
    try:
        res = load_test_server_yaml()
        _test_a_server_configuration(res)
    except FileNotFoundError:
        print(f"File not found: {TEST_SERVER_YAML_FILENAME}.")
    except TypeError as exc:
        raise BadTestServerConfig(
            f"Error in {TEST_SERVER_YAML_FILENAME}: {exc!s}"
        ) from exc
    except udm_rest_client.exceptions.APICommunicationError as exc:
        raise BadTestServerConfig(
            f"Error connecting to test server using credentials "
            f"from {TEST_SERVER_YAML_FILENAME}: [{exc.status}] {exc.reason}"
        ) from exc
    else:
        return res

    print(f"Trying to use running Docker container {TEST_DOCKER_CONTAINER_NAME!r}...")
    try:
        res = running_test_container()
        _test_a_server_configuration(res)
    except ContainerNotFound:
        print(f"Container not found: {TEST_DOCKER_CONTAINER_NAME}.")
    except ContainerIpUnknown as exc:
        raise BadTestServerConfig(str(exc)) from exc
    except udm_rest_client.exceptions.APICommunicationError as exc:
        raise BadTestServerConfig(
            f"Error connecting to test server using credentials for "
            f"Docker container {TEST_DOCKER_CONTAINER_NAME!r}: [{exc.status}] {exc!s}"
        ) from exc
    else:
        return res

    print("Trying to load test server config from environment...")
    try:
        res = UDMServer(
            host=os.environ["UCS_HOST"],
            username=parse_dn(os.environ["UCS_USERDN"])[0][1],
            user_dn=os.environ["UCS_USERDN"],
            password=os.environ["UCS_PASSWORD"],
        )
        _test_a_server_configuration(res)
    except (IndexError, KeyError):
        print("Test server config not found in environment.")
    except LDAPInvalidDnError as exc:
        raise BadTestServerConfig(
            f"Invalid DN in environment variable 'UCS_USERDN': {exc!s}"
        ) from exc
    except udm_rest_client.exceptions.APICommunicationError as exc:
        raise BadTestServerConfig(
            f"Error connecting to test server using credentials from the "
            f"environment: [{exc.status}] {exc.reason}"
        ) from exc
    else:
        return res

    raise NoTestServerConfig("No test server configuration found.")
def getGroupCnFromDn(self, dnin):
    cn = self.unescapecn(dn.parse_dn(dnin)[0][1])
    return cn
def generate_properties(as_dict=False): setup_prop = {} default_storage = 'ldap' setup_prop['persistence_type'] = 'ldap' setup_prop['encode_salt'] = salt mappingLocations = { 'default': 'ldap', 'token': 'ldap', 'cache': 'ldap', 'user': '******', 'site': 'ldap' } oxauth_file = '/opt/tomcat/webapps/oxauth.war' if not os.path.exists(oxauth_file): oxauth_file = '/opt/gluu/jetty/oxauth/webapps/oxauth.war' #Determine gluu version war_zip = zipfile.ZipFile(oxauth_file, 'r') menifest = war_zip.read('META-INF/MANIFEST.MF') for l in menifest.splitlines(): ls = l.strip() if sys.version_info[0] > 2: ls = ls.decode('utf-8') n = ls.find(':') if ls[:n].strip() == 'Implementation-Version': gluu_version_str = ls[n + 1:].strip() gluu_version_list = gluu_version_str.split('.') if not gluu_version_list[-1].isdigit(): gluu_version_list.pop(-1) gluu_version = '.'.join(gluu_version_list) if __name__ == '__main__': print("Current Gluu Version is determined as", gluu_version) gluu_3x = '.'.join(gluu_version.split('.')[:2]) < '4.0' if gluu_3x: ox_ldap_prop_file = '/opt/tomcat/conf/ox-ldap.properties' if not os.path.exists(ox_ldap_prop_file): ox_ldap_prop_file = '/etc/gluu/conf/ox-ldap.properties' gluu_ldap_prop = read_properties_file(ox_ldap_prop_file) oxauth_ConfigurationEntryDN = gluu_ldap_prop[ 'oxauth_ConfigurationEntryDN'] oxtrust_ConfigurationEntryDN = gluu_ldap_prop[ 'oxtrust_ConfigurationEntryDN'] oxidp_ConfigurationEntryDN = gluu_ldap_prop[ 'oxidp_ConfigurationEntryDN'] gluu_ConfigurationDN = ','.join( oxauth_ConfigurationEntryDN.split(',')[2:]) inum_org_str = oxauth_ConfigurationEntryDN.split(',')[2] else: gluu_prop = read_properties_file('/etc/gluu/conf/gluu.properties') setup_prop['persistence_type'] = gluu_prop['persistence.type'] oxauth_ConfigurationEntryDN = gluu_prop['oxauth_ConfigurationEntryDN'] oxtrust_ConfigurationEntryDN = gluu_prop[ 'oxtrust_ConfigurationEntryDN'] oxidp_ConfigurationEntryDN = gluu_prop['oxidp_ConfigurationEntryDN'] gluu_ConfigurationDN = 'ou=configuration,o=gluu' gluu_hybrid_properties_fn = '/etc/gluu/conf/gluu-hybrid.properties' if setup_prop['persistence_type'] == 'couchbase': mappingLocations = { 'default': 'couchbase', 'token': 'couchbase', 'cache': 'couchbase', 'user': '******', 'site': 'couchbase' } default_storage = 'couchbase' if setup_prop['persistence_type'] in ('hybrid'): gluu_hybrid_properties = read_properties_file( gluu_hybrid_properties_fn) mappingLocations = { 'default': gluu_hybrid_properties['storage.default'] } storages = [ storage.strip() for storage in gluu_hybrid_properties['storages'].split(',') ] for ml, m in (('user', 'people'), ('cache', 'cache'), ('site', 'cache-refresh'), ('token', 'tokens')): for storage in storages: if m in gluu_hybrid_properties.get( 'storage.{}.mapping'.format(storage), []): mappingLocations[ml] = storage default_storage = mappingLocations['default'] if setup_prop['persistence_type'] in ('ldap', 'hybrid'): gluu_ldap_prop_fn = '/etc/gluu/conf/gluu-ldap.properties' if os.path.exists(gluu_ldap_prop_fn): gluu_ldap_prop = read_properties_file(gluu_ldap_prop_fn) if setup_prop['persistence_type'] != 'ldap': gluu_cb_prop_fn = '/etc/gluu/conf/gluu-couchbase.properties' if os.path.exists(gluu_cb_prop_fn): gluu_cb_prop = read_properties_file(gluu_cb_prop_fn) setup_prop['couchebaseClusterAdmin'] = gluu_cb_prop[ 'auth.userName'] setup_prop['encoded_cb_password'] = gluu_cb_prop[ 'auth.userPassword'] setup_prop['cb_password'] = unobscure( setup_prop['encoded_cb_password']) setup_prop['couchbase_bucket_prefix'] = gluu_cb_prop[ 'bucket.default'] 
setup_prop['couchbase_hostname'] = gluu_cb_prop[ 'servers'].split(',')[0].strip() setup_prop['encoded_couchbaseTrustStorePass'] = gluu_cb_prop[ 'ssl.trustStore.pin'] setup_prop['couchbaseTrustStorePass'] = unobscure( gluu_cb_prop['ssl.trustStore.pin']) if ((3, 0) <= sys.version_info <= (3, 9)): from .cbm import CBM elif ((2, 0) <= sys.version_info <= (2, 9)): from cbm import CBM cbm = CBM(setup_prop['couchbase_hostname'], setup_prop['couchebaseClusterAdmin'], setup_prop['cb_password']) cb_who = cbm.whoami() if cb_who.get('roles'): for rd in cb_who['roles']: for r in rd: if r == 'role' and rd[r] == 'admin': setup_prop['isCouchbaseUserAdmin'] = True break if gluu_version < '4.1.0': jetty_services = { 'oxauth': ('installOxAuth', 0.3, 0.7), 'identity': ('installOxTrust', 0.2), 'idp': ('installSaml', 0.2), 'oxauth-rp': ('installOxAuthRP', 0.1), 'passport': ('installPassport', 0.1), } else: jetty_services = { 'oxauth': ('installOxAuth', 0.2, 0.7), 'identity': ('installOxTrust', 0.25), 'idp': ('installSaml', 0.25), 'oxauth-rp': ('installOxAuthRP', 0.1), 'casa': ('installCasa', 0.1), 'passport': ('installPassport', 0.1), } if setup_prop['persistence_type'] != 'couchbase': setup_prop['ldap_binddn'] = gluu_ldap_prop['bindDN'] setup_prop['ldapPass'] = unobscure(gluu_ldap_prop['bindPassword']) try: setup_prop['opendj_p12_pass'] = unobscure( gluu_ldap_prop['ssl.trustStorePin']) except: pass setup_prop['ldap_hostname'], setup_prop['ldaps_port'] = gluu_ldap_prop[ 'servers'].split(',')[0].split(':') ldap_server = ldap3.Server(setup_prop['ldap_hostname'], port=int(setup_prop['ldaps_port']), use_ssl=True) ldap_conn = ldap3.Connection( ldap_server, user=setup_prop['ldap_binddn'], password=setup_prop['ldapPass'], ) ldap_conn.bind() if gluu_3x: ldap_conn.search(search_base='o=gluu', search_scope=ldap3.LEVEL, search_filter='(objectClass=*)', attributes=['*']) result = ldap_conn.response for entry in result: if 'gluuOrganization' in entry['attributes']['objectClass']: inumOrg = entry['attributes']['o'][0] uma_rpt_policy_inum = '{}!0011!2DAF.F995'.format(inumOrg) scim_access_policy_inum = '{}!0011!2DAF-F9A5'.format(inumOrg) else: uma_rpt_policy_inum = '2DAF-F995' scim_access_policy_inum = '2DAF-F9A5' if default_storage == 'ldap': ldap_conn.search(search_base='ou=oxradius,ou=configuration,o=gluu', search_scope=ldap3.BASE, search_filter='(objectClass=*)', attributes=['*']) result = ldap_conn.response if result: setup_prop['installGluuRadius'] = True ldap_conn.search(search_base='ou=clients,o=gluu', search_scope=ldap3.SUBTREE, search_filter='(inum=1701.*)', attributes=['*']) result = ldap_conn.response if result: setup_prop['gluu_radius_client_id'] = result[0]['attributes'][ 'inum'][0] setup_prop['gluu_ro_encoded_pw'] = result[0]['attributes'][ 'oxAuthClientSecret'][0] setup_prop['gluu_ro_pw'] = unobscure( setup_prop['gluu_ro_encoded_pw']) ldap_conn.search(search_base='inum=5866-4202,ou=scripts,o=gluu', search_scope=ldap3.BASE, search_filter='(objectClass=*)', attributes=['oxEnabled']) result = ldap_conn.response if result and result[0]['attributes']['oxEnabled'][0]: setup_prop['enableRadiusScripts'] = True ldap_conn.search(search_base='ou=clients,o=gluu', search_scope=ldap3.SUBTREE, search_filter='(inum=1402.*)', attributes=['inum']) result = ldap_conn.response if result: setup_prop['oxtrust_requesting_party_client_id'] = result[0][ 'attributes']['inum'][0] elif default_storage == 'couchbase': n1ql = 'SELECT * from `{}` USE KEYS "configuration_oxradius"'.format( setup_prop['couchbase_bucket_prefix']) result = 
get_cb_result(cbm, n1ql) if result: setup_prop['installGluuRadius'] = True n1ql = 'SELECT inum from `{}` WHERE objectClass="oxAuthClient" AND inum LIKE "1701.%"'.format( setup_prop['couchbase_bucket_prefix']) result = get_cb_result(cbm, n1ql) if result: setup_prop['gluu_radius_client_id'] = str(result[0]['inum']) if 'oxAuthClientSecret' in result[0]: setup_prop['gluu_ro_encoded_pw'] = str( result[0]['oxAuthClientSecret']) setup_prop['gluu_ro_pw'] = unobscure( setup_prop['gluu_ro_encoded_pw']) n1ql = 'SELECT oxEnabled from `{}` USE KEYS "scripts_5866-4202"'.format( setup_prop['couchbase_bucket_prefix']) result = get_cb_result(cbm, n1ql) if result and result[0]['oxEnabled']: setup_prop['enableRadiusScripts'] = True n1ql = 'SELECT inum from `{}` WHERE objectClass="oxAuthClient" AND inum LIKE "1402.%"'.format( setup_prop['couchbase_bucket_prefix']) result = get_cb_result(cbm, n1ql) if result: setup_prop['oxtrust_requesting_party_client_id'] = str( result[0]['inum']) admin_dn = None if mappingLocations['user'] == 'ldap': ldap_conn.search(search_base='o=gluu', search_scope=ldap3.SUBTREE, search_filter='(gluuGroupType=gluuManagerGroup)', attributes=['member']) result = ldap_conn.response if result and result[0]['attributes'].get('member'): admin_dn = result[0]['attributes']['member'][0] if mappingLocations['user'] == 'couchbase': bucket = '{}_user'.format(setup_prop['couchbase_bucket_prefix']) n1ql = 'SELECT * from `{}` where objectClass="gluuGroup" and gluuGroupType="gluuManagerGroup"'.format( bucket) result = get_cb_result(cbm, n1ql) if result and result[0][bucket]['member']: admin_dn = result[0][bucket]['member'][0] if admin_dn: for rd in dnutils.parse_dn(admin_dn): if rd[0] == 'inum': setup_prop['admin_inum'] = str(rd[1]) break oxTrustConfApplication = None oxConfApplication = None oxAuthConfDynamic = None oxAuthConfDynamic = None if default_storage == 'ldap': ldap_conn.search(search_base=gluu_ConfigurationDN, search_scope=ldap3.BASE, search_filter='(objectClass=*)', attributes=['*']) result = ldap_conn.response if 'gluuIpAddress' in result[0]['attributes']: setup_prop['ip'] = str(result[0]['attributes']['gluuIpAddress'][0]) try: oxCacheConfiguration = json.loads( result[0]['attributes']['oxCacheConfiguration'][0]) setup_prop['cache_provider_type'] = str( oxCacheConfiguration['cacheProviderType']) except Exception as e: print("Error getting cache provider type", e) result = ldap_conn.search( search_base=oxidp_ConfigurationEntryDN, search_scope=ldap3.BASE, search_filter='(objectClass=oxApplicationConfiguration)', attributes=['oxConfApplication']) result = ldap_conn.response oxConfApplication = json.loads( result[0]['attributes']['oxConfApplication'][0]) result = ldap_conn.search( search_base=oxauth_ConfigurationEntryDN, search_scope=ldap3.BASE, search_filter='(objectClass=oxAuthConfiguration)', attributes=['oxAuthConfDynamic']) result = ldap_conn.response oxAuthConfDynamic = json.loads( result[0]['attributes']['oxAuthConfDynamic'][0]) result = ldap_conn.search( search_base=oxtrust_ConfigurationEntryDN, search_scope=ldap3.BASE, search_filter='(objectClass=oxTrustConfiguration)', attributes=['oxTrustConfApplication']) result = ldap_conn.response oxTrustConfApplication = json.loads( result[0]['attributes']['oxTrustConfApplication'][0]) elif default_storage == 'couchbase': bucket = setup_prop['couchbase_bucket_prefix'] s_key = get_key_from(gluu_ConfigurationDN) n1ql = 'SELECT * FROM `{}` USE KEYS "{}"'.format(format(bucket), s_key) result = get_cb_result(cbm, n1ql) if result: if 'gluuIpAddress' in 
result[0][bucket]: setup_prop['ip'] = str(result[0][bucket]['gluuIpAddress']) setup_prop['cache_provider_type'] = str( result[0][bucket]['oxCacheConfiguration']['cacheProviderType']) s_key = get_key_from(oxidp_ConfigurationEntryDN) n1ql = 'SELECT oxConfApplication FROM `{}` USE KEYS "{}"'.format( format(bucket), s_key) result = get_cb_result(cbm, n1ql) if result: oxConfApplication = result[0]['oxConfApplication'] s_key = get_key_from(oxauth_ConfigurationEntryDN) n1ql = 'SELECT oxAuthConfDynamic FROM `{}` USE KEYS "{}"'.format( format(bucket), s_key) result = get_cb_result(cbm, n1ql) if result: oxAuthConfDynamic = result[0]['oxAuthConfDynamic'] s_key = get_key_from(oxtrust_ConfigurationEntryDN) n1ql = 'SELECT oxTrustConfApplication FROM `{}` USE KEYS "{}"'.format( format(bucket), s_key) result = get_cb_result(cbm, n1ql) if result: oxTrustConfApplication = result[0]['oxTrustConfApplication'] if oxTrustConfApplication: if 'apiUmaClientId' in oxTrustConfApplication: setup_prop['oxtrust_resource_server_client_id'] = str( oxTrustConfApplication['apiUmaClientId']) if 'apiUmaClientKeyStorePassword' in oxTrustConfApplication: setup_prop['api_rs_client_jks_pass'] = str( unobscure( oxTrustConfApplication['apiUmaClientKeyStorePassword'])) if 'apiUmaResourceId' in oxTrustConfApplication: setup_prop['oxtrust_resource_id'] = str( oxTrustConfApplication['apiUmaResourceId']) setup_prop['shibJksPass'] = str( unobscure(oxTrustConfApplication['idpSecurityKeyPassword'])) setup_prop['admin_email'] = str( oxTrustConfApplication['orgSupportEmail']) if 'organizationName' in oxTrustConfApplication: setup_prop['orgName'] = str( oxTrustConfApplication['organizationName']) setup_prop['oxauth_client_id'] = str( oxTrustConfApplication['oxAuthClientId']) setup_prop['oxauthClient_pw'] = str( unobscure(oxTrustConfApplication['oxAuthClientPassword'])) if 'scimUmaClientId' in oxTrustConfApplication: setup_prop['scim_rs_client_id'] = str( oxTrustConfApplication['scimUmaClientId']) if 'scimUmaClientId' in oxTrustConfApplication: setup_prop['scim_resource_oxid'] = str( oxTrustConfApplication['scimUmaResourceId']) if 'scimTestMode' in oxTrustConfApplication: setup_prop['scimTestMode'] = oxTrustConfApplication['scimTestMode'] if 'apiUmaClientKeyStorePassword' in oxTrustConfApplication: setup_prop['api_rp_client_jks_pass'] = unobscure( oxTrustConfApplication['apiUmaClientKeyStorePassword']) setup_prop['api_rs_client_jks_fn'] = str( oxTrustConfApplication['apiUmaClientKeyStoreFile']) if 'scimUmaClientKeyStorePassword' in oxTrustConfApplication: setup_prop['scim_rs_client_jks_pass'] = unobscure( oxTrustConfApplication['scimUmaClientKeyStorePassword']) setup_prop['scim_rs_client_jks_fn'] = str( oxTrustConfApplication['scimUmaClientKeyStoreFile']) if oxConfApplication: setup_prop['idpClient_pw'] = str( unobscure(oxConfApplication['openIdClientPassword'])) setup_prop['idp_client_id'] = str(oxConfApplication['openIdClientId']) if oxAuthConfDynamic: o_issuer = urlparse(oxAuthConfDynamic['issuer']) setup_prop['hostname'] = str(o_issuer.netloc) setup_prop[ 'oxauth_openidScopeBackwardCompatibility'] = oxAuthConfDynamic.get( 'openidScopeBackwardCompatibility', False) if 'pairwiseCalculationSalt' in oxAuthConfDynamic: setup_prop['pairwiseCalculationSalt'] = str( oxAuthConfDynamic['pairwiseCalculationSalt']) if 'legacyIdTokenClaims' in oxAuthConfDynamic: setup_prop['oxauth_legacyIdTokenClaims'] = oxAuthConfDynamic[ 'legacyIdTokenClaims'] if 'pairwiseCalculationKey' in oxAuthConfDynamic: setup_prop['pairwiseCalculationKey'] = str( 
oxAuthConfDynamic['pairwiseCalculationKey']) if 'keyStoreFile' in oxAuthConfDynamic: setup_prop['oxauth_openid_jks_fn'] = str( oxAuthConfDynamic['keyStoreFile']) if 'keyStoreSecret' in oxAuthConfDynamic: setup_prop['oxauth_openid_jks_pass'] = str( oxAuthConfDynamic['keyStoreSecret']) ssl_subj = get_ssl_subject('/etc/certs/httpd.crt') setup_prop['countryCode'] = ssl_subj['C'] setup_prop['state'] = ssl_subj['ST'] setup_prop['city'] = ssl_subj['L'] setup_prop['city'] = ssl_subj['L'] setup_prop['oxtrust_admin_password'] = setup_prop['ldapPass'] if not 'orgName' in setup_prop: setup_prop['orgName'] = ssl_subj['O'] for service in jetty_services: setup_prop[jetty_services[service][0]] = os.path.exists( '/opt/gluu/jetty/{0}/webapps/{0}.war'.format(service)) if setup_prop['installSaml']: setup_prop['gluuSamlEnabled'] = True if os.path.exists('/opt/gluu/node/passport/server'): setup_prop['installPassport'] = True application_max_ram = 3072 default_dir = '/etc/default' usedRatio = 0.001 oxauth_max_heap_mem = 0 for service in jetty_services: service_default_fn = os.path.join(default_dir, service) if os.path.exists(service_default_fn): usedRatio += jetty_services[service][1] if service == 'oxauth': service_prop = read_properties_file(service_default_fn) m = re.search('-Xmx(\d*)m', service_prop['JAVA_OPTIONS']) oxauth_max_heap_mem = int(m.groups()[0]) if oxauth_max_heap_mem: ratioMultiplier = 1.0 + (1.0 - usedRatio) / usedRatio applicationMemory = oxauth_max_heap_mem / jetty_services['oxauth'][2] allowedRatio = jetty_services['oxauth'][1] * ratioMultiplier application_max_ram = int(round(applicationMemory / allowedRatio)) setup_prop['application_max_ram'] = application_max_ram if os.path.exists(os.path.join(default_dir, 'gluu-radius')): setup_prop['gluuRadiusEnabled'] = True setup_prop['oxauth_openidScopeBackwardCompatibility'] = True setup_prop['os_type'] = os_type setup_prop['os_version'] = os_version https_gluu_fn = '/etc/httpd/conf.d/https_gluu.conf' if setup_prop[ 'os_type'] in ( 'red', 'fedora', 'centos') else '/etc/apache2/sites-available/https_gluu.conf' setup_prop['installHTTPD'] = os.path.exists(https_gluu_fn) setup_prop['mappingLocations'] = mappingLocations asimba_xml = '/opt/tomcat/webapps/asimba/WEB-INF/conf/asimba.xml' if os.path.exists(asimba_xml): for l in open(asimba_xml): m = re.search('<password>(.*)</password>', '<password>p49IXMHN06SL</password>') if m: setup_prop['asimbaJksPass'] = m.groups()[0] if not 'inumOrg' in setup_prop: setup_prop['inumOrg'] = setup_prop['admin_inum'].split('!0000!')[0] if not 'githubBranchName' in setup_prop: setup_prop['githubBranchName'] = 'version_' + gluu_version return setup_prop
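# Illustrative detail from the property collection above: the admin inum is
# pulled out of the manager group's member DN with parse_dn (dnutils is
# ldap3.utils.dn); the DN below is a made-up sample.
from ldap3.utils import dn as dnutils

admin_dn = 'inum=631e.aa01,ou=people,o=gluu'
for rd in dnutils.parse_dn(admin_dn):
    if rd[0] == 'inum':
        print(rd[1])  # 631e.aa01
        break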
def _generate_csr_from_private_key(order_model, project_model):
    """Generate a CSR from the private key.

    :param: order_model - order for the request
    :param: project_model - project for this request
    :return: CSR (certificate signing request) in PEM format
    :raise: :class:`StoredKeyPrivateKeyNotFound` if private key not found
            :class:`StoredKeyContainerNotFound` if container not found
    """
    container_id, container = _get_container_from_order_meta(order_model,
                                                             project_model)

    if not container:
        raise excep.StoredKeyContainerNotFound(container_id)

    passphrase = None
    private_key = None
    for cs in container.container_secrets:
        secret_repo = repos.get_secret_repository()
        if cs.name == 'private_key':
            private_key_model = secret_repo.get(
                cs.secret_id, project_model.external_id)
            private_key = plugin.get_secret(
                'application/pkcs8', private_key_model, project_model)
        elif cs.name == 'private_key_passphrase':
            passphrase_model = secret_repo.get(
                cs.secret_id, project_model.external_id)
            passphrase = plugin.get_secret(
                'text/plain;charset=utf-8', passphrase_model, project_model)
            passphrase = str(passphrase)

    if not private_key:
        raise excep.StoredKeyPrivateKeyNotFound(container.id)

    if passphrase is None:
        pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, private_key)
    else:
        pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, private_key,
                                      passphrase.encode('utf-8'))

    subject_name = order_model.meta.get('subject_dn')
    subject_name_dns = parse_dn(subject_name)

    extensions = order_model.meta.get('extensions', None)

    req = crypto.X509Req()
    subj = req.get_subject()

    # Note: must iterate over the DNs in reverse order, or the resulting
    # subject name will be reversed.
    for ava in reversed(subject_name_dns):
        key, val, extra = ava
        setattr(subj, key.upper(), val)
    req.set_pubkey(pkey)
    if extensions:
        # TODO(alee-3) We need code here to parse the encoded extensions and
        # convert them into X509Extension objects. This code will also be
        # used in the validation code. Commenting out for now till we figure
        # out how to do this.
        # req.add_extensions(extensions)
        pass
    req.sign(pkey, 'sha256')

    csr = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)

    return csr
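# Sketch of the ordering note in the code above (assuming pyOpenSSL and
# ldap3): iterating the parsed DN in reverse keeps the subject in the usual
# C / O / CN order. Sample DN only.
from OpenSSL import crypto
from ldap3.utils.dn import parse_dn

req = crypto.X509Req()
subj = req.get_subject()
for key, val, _extra in reversed(parse_dn('CN=www.example.com,O=Example,C=US')):
    setattr(subj, key.upper(), val)
print(subj.get_components())
# [(b'C', b'US'), (b'O', b'Example'), (b'CN', b'www.example.com')]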
def collect(self): print("Please wait while collectiong properties...") self.logIt("Previously installed instance. Collecting properties") salt_fn = os.path.join(Config.configFolder,'salt') if os.path.exists(salt_fn): salt_prop = base.read_properties_file(salt_fn) Config.encode_salt = salt_prop['encodeSalt'] gluu_prop = base.read_properties_file(Config.gluu_properties_fn) Config.persistence_type = gluu_prop['persistence.type'] oxauth_ConfigurationEntryDN = gluu_prop['oxauth_ConfigurationEntryDN'] oxtrust_ConfigurationEntryDN = gluu_prop['oxtrust_ConfigurationEntryDN'] oxidp_ConfigurationEntryDN = gluu_prop['oxidp_ConfigurationEntryDN'] gluu_ConfigurationDN = 'ou=configuration,o=gluu' if Config.persistence_type == 'couchbase': Config.mappingLocations = { group: 'couchbase' for group in Config.couchbaseBucketDict } default_storage = 'couchbase' if Config.persistence_type != 'ldap' and os.path.exists(Config.gluuCouchebaseProperties): gluu_cb_prop = base.read_properties_file(Config.gluuCouchebaseProperties) Config.couchebaseClusterAdmin = gluu_cb_prop['auth.userName'] Config.encoded_cb_password = gluu_cb_prop['auth.userPassword'] Config.cb_password = self.unobscure(gluu_cb_prop['auth.userPassword']) Config.couchbase_bucket_prefix = gluu_cb_prop['bucket.default'] Config.couchbase_hostname = gluu_cb_prop['servers'].split(',')[0].strip() Config.encoded_couchbaseTrustStorePass = gluu_cb_prop['ssl.trustStore.pin'] Config.couchbaseTrustStorePass = self.unobscure(gluu_cb_prop['ssl.trustStore.pin']) if Config.persistence_type != 'couchbase' and os.path.exists(Config.ox_ldap_properties): gluu_ldap_prop = base.read_properties_file(Config.ox_ldap_properties) Config.ldap_binddn = gluu_ldap_prop['bindDN'] Config.ldapPass = self.unobscure(gluu_ldap_prop['bindPassword']) Config.opendj_p12_pass = self.unobscure(gluu_ldap_prop['ssl.trustStorePin']) Config.ldap_hostname, Config.ldaps_port = gluu_ldap_prop['servers'].split(',')[0].split(':') if Config.persistence_type in ['hybrid']: gluu_hybrid_properties = base.read_properties_file(gluu_hybrid_properties_fn) Config.mappingLocations = {'default': gluu_hybrid_properties['storage.default']} storages = [ storage.strip() for storage in gluu_hybrid_properties['storages'].split(',') ] for ml, m in (('user', 'people'), ('cache', 'cache'), ('site', 'cache-refresh'), ('token', 'tokens')): for storage in storages: if m in gluu_hybrid_properties.get('storage.{}.mapping'.format(storage),[]): Config.mappingLocations[ml] = storage if not Config.get('couchbase_bucket_prefix'): Config.couchbase_bucket_prefix = 'gluu' # It is time to bind database dbUtils.bind() result = dbUtils.search('ou=clients,o=gluu', search_filter='(inum=1701.*)', search_scope=ldap3.SUBTREE) if result: Config.gluu_radius_client_id = result['inum'] Config.gluu_ro_encoded_pw = result['oxAuthClientSecret'] Config.gluu_ro_pw = self.unobscure(Config.gluu_ro_encoded_pw) result = dbUtils.search('inum=5866-4202,ou=scripts,o=gluu', search_scope=ldap3.BASE) if result: Config.enableRadiusScripts = result['oxEnabled'] result = dbUtils.search('ou=clients,o=gluu', search_filter='(inum=1402.*)', search_scope=ldap3.SUBTREE) if result: Config.oxtrust_requesting_party_client_id = result['inum'] admin_dn = None result = dbUtils.search('o=gluu', search_filter='(gluuGroupType=gluuManagerGroup)', search_scope=ldap3.SUBTREE) if result: admin_dn = result['member'][0] if admin_dn: for rd in dnutils.parse_dn(admin_dn): if rd[0] == 'inum': Config.admin_inum = str(rd[1]) break oxConfiguration = dbUtils.search(gluu_ConfigurationDN, 
search_scope=ldap3.BASE) if 'gluuIpAddress' in oxConfiguration: Config.ip = oxConfiguration['gluuIpAddress'] oxCacheConfiguration = json.loads(oxConfiguration['oxCacheConfiguration']) Config.cache_provider_type = str(oxCacheConfiguration['cacheProviderType']) result = dbUtils.search(oxidp_ConfigurationEntryDN, search_filter='(objectClass=oxApplicationConfiguration)', search_scope=ldap3.BASE) if result: oxConfApplication = json.loads(result['oxConfApplication']) Config.idpClient_encoded_pw = oxConfApplication['openIdClientPassword'] Config.idpClient_pw = self.unobscure(Config.idpClient_encoded_pw) Config.idp_client_id = oxConfApplication['openIdClientId'] if 'openIdClientPassword' in oxConfApplication: Config.idpClient_pw = self.unobscure(oxConfApplication['openIdClientPassword']) if 'openIdClientId' in oxConfApplication: Config.idp_client_id = oxConfApplication['openIdClientId'] dn_oxauth, oxAuthConfDynamic = dbUtils.get_oxAuthConfDynamic() dn_oxtrust, oxTrustConfApplication = dbUtils.get_oxTrustConfApplication() if 'apiUmaClientId' in oxTrustConfApplication: Config.oxtrust_resource_server_client_id = oxTrustConfApplication['apiUmaClientId'] if 'apiUmaClientKeyStorePassword' in oxTrustConfApplication: Config.api_rs_client_jks_pass = self.unobscure(oxTrustConfApplication['apiUmaClientKeyStorePassword']) if 'apiUmaResourceId' in oxTrustConfApplication: Config.oxtrust_resource_id = oxTrustConfApplication['apiUmaResourceId'] if 'idpSecurityKeyPassword' in oxTrustConfApplication: Config.encoded_shib_jks_pw = oxTrustConfApplication['idpSecurityKeyPassword'] Config.shibJksPass = self.unobscure(Config.encoded_shib_jks_pw) Config.admin_email = oxTrustConfApplication['orgSupportEmail'] if 'organizationName' in oxTrustConfApplication: Config.orgName = oxTrustConfApplication['organizationName'] Config.oxauth_client_id = oxTrustConfApplication['oxAuthClientId'] Config.oxauthClient_pw = self.unobscure(oxTrustConfApplication['oxAuthClientPassword']) Config.oxauthClient_encoded_pw = oxTrustConfApplication['oxAuthClientPassword'] Config.scim_rp_client_jks_pass = '******' # this is static if 'scimUmaClientId' in oxTrustConfApplication: Config.scim_rs_client_id = oxTrustConfApplication['scimUmaClientId'] if 'scimUmaClientId' in oxTrustConfApplication: Config.scim_resource_oxid = oxTrustConfApplication['scimUmaResourceId'] if 'scimTestMode' in oxTrustConfApplication: Config.scimTestMode = oxTrustConfApplication['scimTestMode'] if 'apiUmaClientKeyStorePassword' in oxTrustConfApplication: Config.api_rp_client_jks_pass = self.unobscure(oxTrustConfApplication['apiUmaClientKeyStorePassword']) Config.api_rs_client_jks_fn = oxTrustConfApplication['apiUmaClientKeyStoreFile'] if 'scimUmaClientKeyStorePassword' in oxTrustConfApplication: Config.scim_rs_client_jks_pass = self.unobscure(oxTrustConfApplication['scimUmaClientKeyStorePassword']) Config.scim_rs_client_jks_fn = str(oxTrustConfApplication['scimUmaClientKeyStoreFile']) # Other clients client_var_id_list = ( ('scim_rp_client_id', '1202.'), ('passport_rs_client_id', '1501.'), ('passport_rp_client_id', '1502.'), ('passport_rp_ii_client_id', '1503.'), ('gluu_radius_client_id', '1701.'), ) self.check_clients(client_var_id_list) self.check_clients([('passport_resource_id', '1504.')]) o_issuer = urlparse(oxAuthConfDynamic['issuer']) Config.hostname = str(o_issuer.netloc) Config.oxauth_openidScopeBackwardCompatibility = oxAuthConfDynamic.get('openidScopeBackwardCompatibility', False) if 'pairwiseCalculationSalt' in oxAuthConfDynamic: Config.pairwiseCalculationSalt = 
oxAuthConfDynamic['pairwiseCalculationSalt'] if 'legacyIdTokenClaims' in oxAuthConfDynamic: Config.oxauth_legacyIdTokenClaims = oxAuthConfDynamic['legacyIdTokenClaims'] if 'pairwiseCalculationKey' in oxAuthConfDynamic: Config.pairwiseCalculationKey = oxAuthConfDynamic['pairwiseCalculationKey'] if 'keyStoreFile' in oxAuthConfDynamic: Config.oxauth_openid_jks_fn = oxAuthConfDynamic['keyStoreFile'] if 'keyStoreSecret' in oxAuthConfDynamic: Config.oxauth_openid_jks_pass = oxAuthConfDynamic['keyStoreSecret'] ssl_subj = self.get_ssl_subject('/etc/certs/httpd.crt') Config.countryCode = ssl_subj['C'] Config.state = ssl_subj['ST'] Config.city = ssl_subj['L'] Config.city = ssl_subj['L'] #this is not good, but there is no way to retreive password from ldap if not Config.get('oxtrust_admin_password'): if Config.get('ldapPass'): Config.oxtrust_admin_password = Config.ldapPass elif Config.get('cb_password'): Config.oxtrust_admin_password = Config.cb_password if not Config.get('orgName'): Config.orgName = ssl_subj['O'] #for service in jetty_services: # setup_prop[jetty_services[service][0]] = os.path.exists('/opt/gluu/jetty/{0}/webapps/{0}.war'.format(service)) for s in ('gluuPassportEnabled', 'gluuRadiusEnabled', 'gluuSamlEnabled', 'gluuScimEnabled'): setattr(Config, s, oxConfiguration.get(s, False)) application_max_ram = 3072 default_dir = '/etc/default' usedRatio = 0.001 oxauth_max_heap_mem = 0 jetty_services = JettyInstaller.jetty_app_configuration for service in jetty_services: service_default_fn = os.path.join(default_dir, service) if os.path.exists(service_default_fn): usedRatio += jetty_services[service]['memory']['ratio'] if service == 'oxauth': service_prop = base.read_properties_file(service_default_fn) m = re.search('-Xmx(\d*)m', service_prop['JAVA_OPTIONS']) oxauth_max_heap_mem = int(m.groups()[0]) if oxauth_max_heap_mem: ratioMultiplier = 1.0 + (1.0 - usedRatio)/usedRatio applicationMemory = oxauth_max_heap_mem / jetty_services['oxauth']['memory']['jvm_heap_ration'] allowedRatio = jetty_services['oxauth']['memory']['ratio'] * ratioMultiplier application_max_ram = int(round(applicationMemory / allowedRatio)) if Config.get('gluuRadiusEnabled'): Config.oxauth_openidScopeBackwardCompatibility = True Config.os_type = base.os_type Config.os_version = base.os_version if not Config.get('ip'): Config.ip = self.detect_ip()
def search(self, search_base, search_filter='(objectClass=*)', search_scope=ldap3.LEVEL, fetchmany=False): base.logIt("Searching database for dn {} with filter {}".format(search_base, search_filter)) backend_location = self.get_backend_location_for_dn(search_base) if backend_location == BackendTypes.LDAP: if self.ldap_conn.search(search_base=search_base, search_filter=search_filter, search_scope=search_scope, attributes=['*']): if not fetchmany: key, document = ldif_utils.get_document_from_entry(self.ldap_conn.response[0]['dn'], self.ldap_conn.response[0]['attributes']) return document documents = [] for result in self.ldap_conn.response: key, document = ldif_utils.get_document_from_entry(result['dn'], result['attributes']) documents.append((key, document)) return documents if backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL, BackendTypes.SPANNER): if backend_location != BackendTypes.SPANNER and self.Base is None: self.rdm_automapper() s_table = None where_clause = '' search_list = [] if '&' in search_filter: re_match = re.match('\(&\((.*?)=(.*?)\)\((.*?)=(.*?)\)', search_filter) if re_match: re_list = re_match.groups() search_list.append((re_list[0], re_list[1])) search_list.append((re_list[2], re_list[3])) else: re_match = re.match('\((.*?)=(.*?)\)', search_filter) if re_match: re_list = re_match.groups() search_list.append((re_list[0], re_list[1])) for col, val in search_list: if col.lower() == 'objectclass': s_table = val break if not s_table: return if backend_location == BackendTypes.SPANNER: if fetchmany: retVal = [] else: retVal = {} for col, val in search_list: if val == '*': continue if col.lower() == 'objectclass': s_table = val else: val = val.replace('*', '%') q_operator = 'LIKE' if '%' in val else '=' where_clause = 'AND {} {} "{}"'.format(col, q_operator, val) if not s_table: return retVal if search_scope == ldap3.BASE: dn_clause = 'dn = "{}"'.format(search_base) else: dn_clause = 'dn LIKE "%{}"'.format(search_base) sql_cmd = 'SELECT * FROM {} WHERE ({}) {}'.format(s_table, dn_clause, where_clause) data = self.spanner.exec_sql(sql_cmd) if not data.get('rows'): return retVal n = len(data['rows']) if fetchmany else 1 for j in range(n): row = data['rows'][j] row_dict = {} for i, field in enumerate(data['fields']): val = row[i] if val: if field['type'] == 'INT64': val = int(val) row_dict[field['name']] = val if fetchmany: retVal.append(row_dict) else: retVal = row_dict break return retVal sqlalchemy_table = self.Base.classes[s_table] sqlalchemyQueryObject = self.session.query(sqlalchemy_table) for col, val in search_list: if val == '*': continue if col.lower() != 'objectclass': val = val.replace('*', '%') sqlalchemyCol = getattr(sqlalchemy_table, col) if '%' in val: sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemyCol.like(val)) else: sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemyCol == val) if search_scope == ldap3.BASE: sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemy_table.dn == search_base) else: sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemy_table.dn.like('%'+search_base)) if fetchmany: result = sqlalchemyQueryObject.all() return [ item.__dict__ for item in result ] else: result = sqlalchemyQueryObject.first() if result: return result.__dict__ if backend_location == BackendTypes.COUCHBASE: key = ldif_utils.get_key_from(search_base) bucket = self.get_bucket_for_key(key) if search_scope == ldap3.BASE: n1ql = 'SELECT * FROM `{}` USE KEYS "{}"'.format(bucket, key) else: if '&' in search_filter: re_match = 
re.match('\(&\((.*?)\)\((.*?)\)\)', search_filter) if re_match: re_list = re_match.groups() dn_to_parse = re_list[0] if 'objectclass' in re_list[1].lower() else re_list[1] else: dn_to_parse = search_filter.strip('(').strip(')') parsed_dn = dnutils.parse_dn(dn_to_parse) attr = parsed_dn[0][0] val = parsed_dn[0][1] if '*' in val: search_clause = 'LIKE "{}"'.format(val.replace('*', '%')) else: search_clause = '="{}"'.format(val.replace('*', '%')) n1ql = 'SELECT * FROM `{}` WHERE `{}` {}'.format(bucket, attr, search_clause) result = self.cbm.exec_query(n1ql) if result.ok: data = result.json() if data.get('results'): if fetchmany: return [ item[bucket] for item in data['results'] ] else: return data['results'][0][bucket]
def import_ldif(self, ldif_files, bucket=None, force=None): base.logIt("Importing ldif file(s): {} ".format(', '.join(ldif_files))) sql_data_fn = os.path.join(Config.outputFolder, Config.rdbm_type, 'jans_data.sql') for ldif_fn in ldif_files: base.logIt("Importing entries from " + ldif_fn) parser = ldif_utils.myLdifParser(ldif_fn) parser.parse() for dn, entry in parser.entries: backend_location = force if force else self.get_backend_location_for_dn(dn) if backend_location == BackendTypes.LDAP: if 'add' in entry and 'changetype' in entry: base.logIt("LDAP modify add dn:{} entry:{}".format(dn, dict(entry))) change_attr = entry['add'][0] ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_ADD, entry[change_attr])]}) self.log_ldap_result(ldap_operation_result) elif 'replace' in entry and 'changetype' in entry: base.logIt("LDAP modify replace dn:{} entry:{}".format(dn, dict(entry))) change_attr = entry['replace'][0] ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_REPLACE, [entry[change_attr][0]])]}) self.log_ldap_result(ldap_operation_result) elif not self.dn_exists(dn): base.logIt("Adding LDAP dn:{} entry:{}".format(dn, dict(entry))) ldap_operation_result = self.ldap_conn.add(dn, attributes=entry) self.log_ldap_result(ldap_operation_result) elif backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL): if self.Base is None: self.rdm_automapper() if 'add' in entry and 'changetype' in entry: attribute = entry['add'][0] new_val = entry[attribute] sqlalchObj = self.get_sqlalchObj_for_dn(dn) if sqlalchObj: if isinstance(sqlalchObj.__table__.columns[attribute].type, self.json_dialects_instance): cur_val = copy.deepcopy(getattr(sqlalchObj, attribute)) for val_ in new_val: cur_val['v'].append(val_) setattr(sqlalchObj, attribute, cur_val) else: setattr(sqlalchObj, attribute, new_val[0]) self.session.commit() else: base.logIt("Can't find current value for repmacement of {}".replace(str(entry)), True) continue elif 'replace' in entry and 'changetype' in entry: attribute = entry['replace'][0] new_val = self.get_rdbm_val(attribute, entry[attribute]) sqlalchObj = self.get_sqlalchObj_for_dn(dn) if sqlalchObj: setattr(sqlalchObj, attribute, new_val) self.session.commit() else: base.logIt("Can't find current value for repmacement of {}".replace(str(entry)), True) continue else: vals = {} dn_parsed = dnutils.parse_dn(dn) rdn_name = dn_parsed[0][0] objectClass = entry.get('objectClass') or entry.get('objectclass') if objectClass: if 'top' in objectClass: objectClass.remove('top') if len(objectClass) == 1 and objectClass[0].lower() == 'organizationalunit': continue objectClass = objectClass[-1] vals['doc_id'] = dn_parsed[0][1] vals['dn'] = dn vals['objectClass'] = objectClass #entry.pop(rdn_name) if 'objectClass' in entry: entry.pop('objectClass') elif 'objectclass' in entry: entry.pop('objectclass') table_name = objectClass if self.dn_exists_rdbm(dn, table_name): base.logIt("DN {} exsits in {} skipping".format(dn, Config.rdbm_type)) continue for lkey in entry: vals[lkey] = self.get_rdbm_val(lkey, entry[lkey]) sqlalchCls = self.Base.classes[table_name] for col in sqlalchCls.__table__.columns: if isinstance(col.type, self.json_dialects_instance) and not col.name in vals: vals[col.name] = {'v': []} sqlalchObj = sqlalchCls() for v in vals: setattr(sqlalchObj, v, vals[v]) base.logIt("Adding {}".format(sqlalchObj.doc_id)) self.session.add(sqlalchObj) self.session.commit() elif backend_location == BackendTypes.COUCHBASE: if len(entry) < 3: continue key, 
document = ldif_utils.get_document_from_entry(dn, entry) cur_bucket = bucket if bucket else self.get_bucket_for_dn(dn) base.logIt("Addnig document {} to Couchebase bucket {}".format(key, cur_bucket)) n1ql_list = [] if 'changetype' in document: if 'replace' in document: attribute = document['replace'] n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(document[attribute]))) elif 'add' in document: attribute = document['add'] result = self.check_attribute_exists(key, attribute) data = document[attribute] if result: if isinstance(data, list): for d in data: n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(d))) else: n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(data))) else: if attribute in attribDataTypes.listAttributes and not isinstance(data, list): data = [data] n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(data))) else: for k in document: try: kdata = json.loads(document[k]) if isinstance(kdata, dict): document[k] = kdata except: pass n1ql_list.append('UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (cur_bucket, key, json.dumps(document))) for q in n1ql_list: self.cbm.exec_query(q)
def get_doc_id_from_dn(self, dn):
    dn_parsed = dnutils.parse_dn(dn)
    return dn_parsed[0][1]
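# Illustrative (assuming dnutils is ldap3.utils.dn): the doc_id used as the
# database key is simply the value of the DN's first RDN. Sample DN only.
from ldap3.utils import dn as dnutils

print(dnutils.parse_dn('inum=2B29,ou=attributes,o=jans')[0][1])  # 2B29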
def import_ldif(self, ldif_files, bucket=None, force=None): base.logIt("Importing ldif file(s): {} ".format(', '.join(ldif_files))) sql_data_fn = os.path.join(Config.outputFolder, Config.rdbm_type, 'jans_data.sql') for ldif_fn in ldif_files: base.logIt("Importing entries from " + ldif_fn) parser = ldif_utils.myLdifParser(ldif_fn) parser.parse() for dn, entry in parser.entries: backend_location = force if force else self.get_backend_location_for_dn(dn) if backend_location == BackendTypes.LDAP: if 'add' in entry and 'changetype' in entry: base.logIt("LDAP modify add dn:{} entry:{}".format(dn, dict(entry))) change_attr = entry['add'][0] ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_ADD, entry[change_attr])]}) self.log_ldap_result(ldap_operation_result) elif 'replace' in entry and 'changetype' in entry: base.logIt("LDAP modify replace dn:{} entry:{}".format(dn, dict(entry))) change_attr = entry['replace'][0] ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_REPLACE, [entry[change_attr][0]])]}) self.log_ldap_result(ldap_operation_result) elif not self.dn_exists(dn): base.logIt("Adding LDAP dn:{} entry:{}".format(dn, dict(entry))) ldap_operation_result = self.ldap_conn.add(dn, attributes=entry) self.log_ldap_result(ldap_operation_result) elif backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL): if self.Base is None: self.rdm_automapper() # TODO: inserting data to sub tables to be implemented for mysql and pgsql if 'add' in entry and 'changetype' in entry: attribute = entry['add'][0] new_val = entry[attribute] sqlalchObj = self.get_sqlalchObj_for_dn(dn) if sqlalchObj: if isinstance(sqlalchObj.__table__.columns[attribute].type, self.json_dialects_instance): cur_val = copy.deepcopy(getattr(sqlalchObj, attribute)) for val_ in new_val: cur_val['v'].append(val_) setattr(sqlalchObj, attribute, cur_val) else: setattr(sqlalchObj, attribute, new_val[0]) self.session.commit() else: base.logIt("Can't find current value for repmacement of {}".replace(str(entry)), True) continue elif 'replace' in entry and 'changetype' in entry: attribute = entry['replace'][0] new_val = self.get_rdbm_val(attribute, entry[attribute]) sqlalchObj = self.get_sqlalchObj_for_dn(dn) if sqlalchObj: setattr(sqlalchObj, attribute, new_val) self.session.commit() else: base.logIt("Can't find current value for repmacement of {}".replace(str(entry)), True) continue else: vals = {} dn_parsed = dnutils.parse_dn(dn) rdn_name = dn_parsed[0][0] objectClass = self.get_clean_objcet_class(entry) if objectClass.lower() == 'organizationalunit': continue vals['doc_id'] = dn_parsed[0][1] vals['dn'] = dn vals['objectClass'] = objectClass #entry.pop(rdn_name) if 'objectClass' in entry: entry.pop('objectClass') elif 'objectclass' in entry: entry.pop('objectclass') table_name = objectClass if self.dn_exists_rdbm(dn, table_name): base.logIt("DN {} exsits in {} skipping".format(dn, Config.rdbm_type)) continue for lkey in entry: vals[lkey] = self.get_rdbm_val(lkey, entry[lkey]) sqlalchCls = self.Base.classes[table_name] for col in sqlalchCls.__table__.columns: if isinstance(col.type, self.json_dialects_instance) and not col.name in vals: vals[col.name] = {'v': []} sqlalchObj = sqlalchCls() for v in vals: setattr(sqlalchObj, v, vals[v]) base.logIt("Adding {}".format(sqlalchObj.doc_id)) self.session.add(sqlalchObj) self.session.commit() elif backend_location == BackendTypes.SPANNER: if 'add' in entry and 'changetype' in entry: table = self.get_spanner_table_for_dn(dn) doc_id = 
self.get_doc_id_from_dn(dn) change_attr = entry['add'][0] if table: doc_id = self.get_doc_id_from_dn(dn) if self.in_subtable(table, change_attr): sub_table = '{}_{}'.format(table, change_attr) for subval in entry[change_attr]: typed_val = self.get_rdbm_val(change_attr, subval, rdbm_type='spanner') dict_doc_id = self.get_sha_digest(typed_val) self.spanner.insert_data(table=sub_table, columns=['doc_id', 'dict_doc_id', change_attr], values=[[doc_id, typed_val, typed_val]]) else: data = self.spanner.exec_sql('SELECT {} FROM {} WHERE doc_id="{}"'.format(entry['add'][0], table, doc_id)) if data.get('rows'): cur_data = [] if 'rows' in data and data['rows'] and data['rows'][0] and data['rows'][0][0]: cur_data = data['rows'][0][0] for cur_val in entry[change_attr]: typed_val = self.get_rdbm_val(change_attr, cur_val, rdbm_type='spanner') cur_data.append(typed_val) self.spanner.update_data(table=table, columns=['doc_id', change_attr], values=[[doc_id, cur_data]]) elif 'replace' in entry and 'changetype' in entry: table = self.get_spanner_table_for_dn(dn) doc_id = self.get_doc_id_from_dn(dn) replace_attr = entry['replace'][0] typed_val = self.get_rdbm_val(replace_attr, entry[replace_attr], rdbm_type='spanner') if self.in_subtable(table, replace_attr): sub_table = '{}_{}'.format(table, replace_attr) # TODO: how to replace ? #for subval in typed_val: # self.spanner.update_data(table=sub_table, columns=['doc_id', replace_attr], values=[[doc_id, subval]]) else: self.spanner.update_data(table=table, columns=['doc_id', replace_attr], values=[[doc_id, typed_val]]) else: vals = {} dn_parsed = dnutils.parse_dn(dn) rdn_name = dn_parsed[0][0] objectClass = objectClass = self.get_clean_objcet_class(entry) if objectClass.lower() == 'organizationalunit': continue doc_id = dn_parsed[0][1] vals['doc_id'] = doc_id vals['dn'] = dn vals['objectClass'] = objectClass if 'objectClass' in entry: entry.pop('objectClass') elif 'objectclass' in entry: entry.pop('objectclass') table_name = objectClass subtable_data = [] for lkey in entry: spanner_vals = self.get_rdbm_val(lkey, entry[lkey], rdbm_type='spanner') if not self.in_subtable(table_name, lkey): vals[lkey] = spanner_vals else: sub_table = '{}_{}'.format(table_name, lkey) sub_table_columns = ['doc_id', 'dict_doc_id', lkey] sub_table_values = [] for subtableval in spanner_vals: dict_doc_id = self.get_sha_digest(subtableval) sub_table_values.append([doc_id, dict_doc_id, subtableval]) subtable_data.append((sub_table, sub_table_columns, sub_table_values)) columns = [ *vals.keys() ] values = [ vals[lkey] for lkey in columns ] self.spanner.insert_data(table=table_name, columns=columns, values=[values]) for sdata in subtable_data: self.spanner.insert_data(table=sdata[0], columns=sdata[1], values=sdata[2]) elif backend_location == BackendTypes.COUCHBASE: if len(entry) < 3: continue key, document = ldif_utils.get_document_from_entry(dn, entry) cur_bucket = bucket if bucket else self.get_bucket_for_dn(dn) base.logIt("Addnig document {} to Couchebase bucket {}".format(key, cur_bucket)) n1ql_list = [] if 'changetype' in document: if 'replace' in document: attribute = document['replace'] n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(document[attribute]))) elif 'add' in document: attribute = document['add'] result = self.check_attribute_exists(key, attribute) data = document[attribute] if result: if isinstance(data, list): for d in data: n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, 
attribute, attribute, json.dumps(d))) else: n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(data))) else: if attribute in attribDataTypes.listAttributes and not isinstance(data, list): data = [data] n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(data))) else: for k in document: try: kdata = json.loads(document[k]) if isinstance(kdata, dict): document[k] = kdata except: pass n1ql_list.append('UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (cur_bucket, key, json.dumps(document))) for q in n1ql_list: self.cbm.exec_query(q)
def formatGroupsGrep(self, grouplist):
    outcache = []
    for group in grouplist:
        cn = self.unescapecn(dn.parse_dn(group)[0][1])
        outcache.append(cn)
    return ', '.join(outcache)