Example #1
 def import_schema(self, schema_file):
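     # Parse the LDIF schema file and add each attribute/objectClass definition to the running LDAP schema (LDAP backend only)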
     if self.moddb == BackendTypes.LDAP:
         base.logIt("Importing schema {}".format(schema_file))
         parser = ldif_utils.myLdifParser(schema_file)
         parser.parse()
         for dn, entry in parser.entries:
             if 'changetype' in entry:
                 entry.pop('changetype')
             if 'add' in entry:
                 entry.pop('add')
             for entry_type in entry:
                 for e in entry[entry_type]:
                     base.logIt("Adding to schema, type: {}  value: {}".format(entry_type, e))
                     ldap_operation_result = self.ldap_conn.modify(dn, {entry_type: [ldap3.MODIFY_ADD, e]})
                     self.log_ldap_result(ldap_operation_result)
         # we need to re-bind after schema operations
         self.ldap_conn.rebind()
Example #2
    def load_test_data(self):
        self.logIt("Re-binding database")
        self.dbUtils.bind(force=True)

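        # Loading test data needs outbound connectivity; probe 8.8.8.8:443 with a short timeout before continuing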
        self.logIt("Checking Internet conncetion")
        socket.setdefaulttimeout(3)
        try:
            socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
                ("8.8.8.8", 443))
        except:
            self.logIt("Failed to connect to 8.8.8.8:443.", True)
            print("Test data loader needs internet connection. Giving up ...")
            return

        if not base.current_app.ScimInstaller.installed():
            self.logIt("Scim was not installed. Installing")
            Config.installScimServer = True
            base.current_app.ScimInstaller.start_installation()

        self.encode_test_passwords()

        self.logIt("Rendering test templates")

        if Config.rdbm_type == 'spanner':
            Config.rdbm_password_enc = ''

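        # Placeholders for the per-backend test properties; rendered blocks for enabled mappings are appended to config_oxauth_test_properties below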
        Config.templateRenderingDict[
            'config_oxauth_test_ldap'] = '# Not available'
        Config.templateRenderingDict[
            'config_oxauth_test_couchbase'] = '# Not available'

        config_oxauth_test_properties = self.fomatWithDict(
            'server.name=%(hostname)s\nconfig.oxauth.issuer=http://localhost:80\nconfig.oxauth.contextPath=http://localhost:80\nconfig.oxauth.salt=%(encode_salt)s\nconfig.persistence.type=%(persistence_type)s\n\n',
            self.merge_dicts(Config.__dict__, Config.templateRenderingDict))

        if self.getMappingType('ldap'):
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'jans-auth/server/config-oxauth-test-ldap.properties.nrnd')
            )
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            config_oxauth_test_properties += '#ldap\n' + rendered_text

        if self.getMappingType('couchbase'):
            couchbaseDict = base.current_app.CouchbaseInstaller.couchbaseDict()
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'jans-auth/server/config-oxauth-test-couchbase.properties.nrnd'
                ))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__, Config.templateRenderingDict,
                                 couchbaseDict))
            config_oxauth_test_properties += '\n#couchbase\n' + rendered_text

        if self.getMappingType('rdbm'):
            base.current_app.RDBMInstaller.server_time_zone()
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'jans-auth/server/config-oxauth-test-sql.properties.nrnd'))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            config_oxauth_test_properties += '\n#sql\n' + rendered_text

            self.logIt("Adding custom attributes and indexes")

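            # Convert the test schema LDIF files to JSON so the RDBM installer can create matching tables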
            schema2json(
                os.path.join(Config.templateFolder,
                             'test/jans-auth/schema/102-oxauth_test.ldif'),
                os.path.join(Config.outputFolder, 'test/jans-auth/schema/'))
            schema2json(
                os.path.join(Config.templateFolder,
                             'test/scim-client/schema/103-scim_test.ldif'),
                os.path.join(Config.outputFolder, 'test/scim-client/schema/'),
            )

            oxauth_json_schema_fn = os.path.join(
                Config.outputFolder,
                'test/jans-auth/schema/102-oxauth_test.json')
            scim_json_schema_fn = os.path.join(
                Config.outputFolder,
                'test/scim-client/schema/103-scim_test.json')
            jans_schema_json_files = [
                oxauth_json_schema_fn, scim_json_schema_fn
            ]

            scim_schema = base.readJsonFile(scim_json_schema_fn)
            may_list = []

            for attribute in scim_schema['attributeTypes']:
                may_list += attribute['names']

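            # Define a jansPerson objectClass allowing every attribute from the SCIM test schema and append it to the schema JSON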
            jansPerson = {
                'kind': 'STRUCTURAL',
                'may': may_list,
                'must': ['objectclass'],
                'names': ['jansPerson'],
                'oid': 'jansObjClass',
                'sup': ['top'],
                'x_origin': 'Jans created objectclass'
            }
            scim_schema['objectClasses'].append(jansPerson)

            with open(scim_json_schema_fn, 'w') as w:
                json.dump(scim_schema, w, indent=2)

            self.dbUtils.read_jans_schema(others=jans_schema_json_files)

            base.current_app.RDBMInstaller.create_tables(
                jans_schema_json_files)
            if Config.rdbm_type != 'spanner':
                self.dbUtils.rdm_automapper()

        self.writeFile(
            os.path.join(
                Config.outputFolder,
                'test/jans-auth/server/config-oxauth-test.properties'),
            config_oxauth_test_properties)

        self.render_templates_folder(self.template_base)

        self.logIt("Loading test ldif files")

        ox_auth_test_ldif = os.path.join(
            Config.outputFolder, 'test/jans-auth/data/oxauth-test-data.ldif')
        ox_auth_test_user_ldif = os.path.join(
            Config.outputFolder,
            'test/jans-auth/data/oxauth-test-data-user.ldif')

        scim_test_ldif = os.path.join(
            Config.outputFolder, 'test/scim-client/data/scim-test-data.ldif')
        scim_test_user_ldif = os.path.join(
            Config.outputFolder,
            'test/scim-client/data/scim-test-data-user.ldif')

        ldif_files = (ox_auth_test_ldif, scim_test_ldif,
                      ox_auth_test_user_ldif, scim_test_user_ldif)
        self.dbUtils.import_ldif(ldif_files)

        apache_user = '******' if base.clone_type == 'deb' else 'apache'

        # Client keys deployment
        base.download(
            'https://raw.githubusercontent.com/JanssenProject/jans-auth-server/master/client/src/test/resources/jans_test_client_keys.zip',
            '/var/www/html/jans_test_client_keys.zip')
        self.run([
            paths.cmd_unzip, '-o', '/var/www/html/jans_test_client_keys.zip',
            '-d', '/var/www/html/'
        ])
        self.run([paths.cmd_rm, '-rf', 'jans_test_client_keys.zip'])
        self.run([
            paths.cmd_chown, '-R', 'root:' + apache_user,
            '/var/www/html/jans-auth-client'
        ])

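        # jans-auth dynamic configuration overrides applied for the test suite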
        oxAuthConfDynamic_changes = {
            'dynamicRegistrationCustomObjectClass':
            'jansClntCustomAttributes',
            'dynamicRegistrationCustomAttributes': [
                "jansTrustedClnt", "myCustomAttr1", "myCustomAttr2",
                "jansInclClaimsInIdTkn"
            ],
            'dynamicRegistrationExpirationTime':
            86400,
            'dynamicGrantTypeDefault': [
                "authorization_code", "implicit", "password",
                "client_credentials", "refresh_token",
                "urn:ietf:params:oauth:grant-type:uma-ticket",
                "urn:openid:params:grant-type:ciba",
                "urn:ietf:params:oauth:grant-type:device_code"
            ],
            'legacyIdTokenClaims':
            True,
            'authenticationFiltersEnabled':
            True,
            'clientAuthenticationFiltersEnabled':
            True,
            'keyRegenerationEnabled':
            True,
            'openidScopeBackwardCompatibility':
            False,
            'forceOfflineAccessScopeToEnableRefreshToken':
            False,
            'dynamicRegistrationPasswordGrantTypeEnabled':
            True,
            'cibaEnabled':
            True,
            'backchannelAuthenticationRequestSigningAlgValuesSupported': [
                "RS256", "RS384", "RS512", "ES256", "ES384", "ES512", "PS256",
                "PS384", "PS512"
            ],
            'backchannelClientId':
            '123-123-123',
            'backchannelUserCodeParameterSupported':
            True,
            'tokenEndpointAuthSigningAlgValuesSupported': [
                'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512', 'ES256',
                'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'userInfoSigningAlgValuesSupported': [
                'none', 'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512',
                'ES256', 'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'consentGatheringScriptBackwardCompatibility':
            False,
            'claimsParameterSupported':
            True,
            'grantTypesSupported': [
                'urn:openid:params:grant-type:ciba', 'authorization_code',
                'urn:ietf:params:oauth:grant-type:uma-ticket',
                'urn:ietf:params:oauth:grant-type:device_code',
                'client_credentials', 'implicit', 'refresh_token', 'password'
            ],
            'idTokenSigningAlgValuesSupported': [
                'none', 'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512',
                'ES256', 'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'requestObjectSigningAlgValuesSupported': [
                'none', 'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512',
                'ES256', 'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'softwareStatementValidationClaimName':
            'jwks_uri',
            'softwareStatementValidationType':
            'jwks_uri',
            'umaGrantAccessIfNoPolicies':
            True,
            'rejectJwtWithNoneAlg':
            False,
            'removeRefreshTokensForClientOnLogout':
            True,
            'fapiCompatibility':
            False,
            'forceIdTokenHintPrecense':
            False,
            'introspectionScriptBackwardCompatibility':
            False,
            'spontaneousScopeLifetime':
            0,
            'tokenEndpointAuthMethodsSupported': [
                'client_secret_basic', 'client_secret_post',
                'client_secret_jwt', 'private_key_jwt', 'tls_client_auth',
                'self_signed_tls_client_auth', 'none'
            ],
            'sessionIdRequestParameterEnabled':
            True,
            'skipRefreshTokenDuringRefreshing':
            False,
            'enabledComponents': [
                'unknown', 'health_check', 'userinfo', 'clientinfo',
                'id_generation', 'registration', 'introspection',
                'revoke_token', 'revoke_session', 'end_session',
                'status_session', 'jans_configuration', 'ciba', 'uma', 'u2f',
                'device_authz', 'stat'
            ]
        }

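        # Optionally fetch a protected CIBA test-config patch and merge it into the dynamic configuration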
        if Config.get('config_patch_creds'):
            data = None
            datajs = None
            patch_url = 'https://ox.gluu.org/protected/jans-auth/jans-auth-test-config-patch.json'
            req = urllib.request.Request(patch_url)
            credentials = Config.get('config_patch_creds')
            encoded_credentials = base64.b64encode(credentials.encode('ascii'))
            req.add_header('Authorization',
                           'Basic %s' % encoded_credentials.decode("ascii"))
            self.logIt("Retrieving auto test ciba patch from " + patch_url)

            try:
                resp = urllib.request.urlopen(req)
                data = resp.read()
                self.logIt("Auto test ciba patch retrieved")
            except:
                self.logIt("Can't retrieve auto test ciba patch", True)

            if data:
                try:
                    datajs = json.loads(data.decode())
                except:
                    self.logIt("Can't decode json for auto test ciba patch",
                               True)

            if datajs:
                oxAuthConfDynamic_changes.update(datajs)
                self.logIt(
                    "oxAuthConfDynamic was updated with auto test ciba patch")

        custom_scripts = ('2DAF-F995', '2DAF-F996', '4BBE-C6A8', 'A51E-76DA')

        self.dbUtils.set_oxAuthConfDynamic(oxAuthConfDynamic_changes)

        # Enable custom scripts
        for inum in custom_scripts:
            self.dbUtils.enable_script(inum)

        if self.dbUtils.moddb == static.BackendTypes.LDAP:
            # Update LDAP schema
            openDjSchemaFolder = os.path.join(Config.ldapBaseFolder,
                                              'config/schema/')
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/jans-auth/schema/102-oxauth_test.ldif'),
                openDjSchemaFolder)
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/scim-client/schema/103-scim_test.ldif'),
                openDjSchemaFolder)

            schema_fn = os.path.join(openDjSchemaFolder,
                                     '77-customAttributes.ldif')

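            # Extend the jansCustomPerson objectClass in 77-customAttributes.ldif with the SCIM custom test attributes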
            obcl_parser = myLdifParser(schema_fn)
            obcl_parser.parse()

            for i, o in enumerate(obcl_parser.entries[0][1]['objectClasses']):
                objcl = ObjectClass(o)
                if 'jansCustomPerson' in objcl.tokens['NAME']:
                    may_list = list(objcl.tokens['MAY'])
                    for a in ('scimCustomFirst', 'scimCustomSecond',
                              'scimCustomThird'):
                        if not a in may_list:
                            may_list.append(a)

                    objcl.tokens['MAY'] = tuple(may_list)
                    obcl_parser.entries[0][1]['objectClasses'][
                        i] = objcl.getstr()

            tmp_fn = '/tmp/77-customAttributes.ldif'
            with open(tmp_fn, 'wb') as w:
                ldif_writer = LDIFWriter(w)
                for dn, entry in obcl_parser.entries:
                    ldif_writer.unparse(dn, entry)

            self.copyFile(tmp_fn, openDjSchemaFolder)

            self.logIt("Making opendj listen on all interfaces")
            ldap_operation_result = self.dbUtils.ldap_conn.modify(
                'cn=LDAPS Connection Handler,cn=Connection Handlers,cn=config',
                {'ds-cfg-listen-address': [ldap3.MODIFY_REPLACE, '0.0.0.0']})

            if not ldap_operation_result:
                self.logIt("Ldap modify operation failed {}".format(
                    str(self.dbUtils.ldap_conn.result)))
                self.logIt(
                    "Ldap modify operation failed {}".format(
                        str(self.dbUtils.ldap_conn.result)), True)

            self.dbUtils.ldap_conn.unbind()

            self.logIt("Re-starting opendj")
            self.restart('opendj')

            self.logIt("Re-binding opendj")
            # try 5 times to re-bind opendj
            for i in range(5):
                time.sleep(5)
                self.logIt("Try binding {} ...".format(i + 1))
                bind_result = self.dbUtils.ldap_conn.bind()
                if bind_result:
                    self.logIt("Binding to opendj was successful")
                    break
                self.logIt("Re-try in 5 seconds")
            else:
                self.logIt("Re-binding opendj FAILED")
                sys.exit("Re-binding opendj FAILED")

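            # Create an equality index for each custom test attribute directly in the OpenDJ config backend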
            for atr in ('myCustomAttr1', 'myCustomAttr2'):

                dn = 'ds-cfg-attribute={},cn=Index,ds-cfg-backend-id={},cn=Backends,cn=config'.format(
                    atr, 'userRoot')
                entry = {
                    'objectClass': ['top', 'ds-cfg-backend-index'],
                    'ds-cfg-attribute': [atr],
                    'ds-cfg-index-type': ['equality'],
                    'ds-cfg-index-entry-limit': ['4000']
                }
                self.logIt("Creating Index {}".format(dn))
                ldap_operation_result = self.dbUtils.ldap_conn.add(
                    dn, attributes=entry)
                if not ldap_operation_result:
                    self.logIt("Ldap modify operation failed {}".format(
                        str(self.dbUtils.ldap_conn.result)))
                    self.logIt(
                        "Ldap modify operation failed {}".format(
                            str(self.dbUtils.ldap_conn.result)), True)

        elif self.dbUtils.moddb in (static.BackendTypes.MYSQL,
                                    static.BackendTypes.PGSQL):
            pass

        elif self.dbUtils.moddb == static.BackendTypes.COUCHBASE:
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_{0}_myCustomAttr1 ON `{0}`(myCustomAttr1) USING GSI WITH {{"defer_build":true}}'
                .format(Config.couchbase_bucket_prefix))
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_{0}_myCustomAttr2 ON `{0}`(myCustomAttr2) USING GSI WITH {{"defer_build":true}}'
                .format(Config.couchbase_bucket_prefix))
            self.dbUtils.cbm.exec_query(
                'BUILD INDEX ON `{0}` (def_{0}_myCustomAttr1, def_{0}_myCustomAttr2)'
                .format(Config.couchbase_bucket_prefix))

        if self.dbUtils.moddb == static.BackendTypes.LDAP:
            self.dbUtils.ldap_conn.bind()

            result = self.dbUtils.search(
                'ou=configuration,o=jans',
                search_filter='(&(jansDbAuth=*)(objectClass=jansAppConf))',
                search_scope=ldap3.BASE)
            oxIDPAuthentication = json.loads(result['jansDbAuth'])
            oxIDPAuthentication['config']['servers'] = [
                '{0}:{1}'.format(Config.hostname, Config.ldaps_port)
            ]
            oxIDPAuthentication_js = json.dumps(oxIDPAuthentication, indent=2)
            self.dbUtils.set_configuration('jansDbAuth',
                                           oxIDPAuthentication_js)

        self.create_test_client_keystore()

        # Disable token binding module
        if base.os_name in ('ubuntu18', 'ubuntu20'):
            self.run(['a2dismod', 'mod_token_binding'])
            self.restart('apache2')

        self.restart('jans-auth')

        if Config.installEleven:
            eleven_tokens_package = os.path.join(
                Config.staticFolder, 'eleven/jans-eleven-tokens.tar.gz')
            target_dir = '/var/lib/softhsm/tokens/'
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            self.run([
                paths.cmd_tar, '-zxf', eleven_tokens_package, '-C', target_dir
            ])
Example #3
    def import_ldif(self, ldif_files, bucket=None, force=None):

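        # Route each parsed LDIF entry to the active backend: LDAP, MySQL/PostgreSQL, Spanner, or Couchbase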
        base.logIt("Importing ldif file(s): {} ".format(', '.join(ldif_files)))

        sql_data_fn = os.path.join(Config.outputFolder, Config.rdbm_type, 'jans_data.sql')

        for ldif_fn in ldif_files:
            base.logIt("Importing entries from " + ldif_fn)
            parser = ldif_utils.myLdifParser(ldif_fn)
            parser.parse()

            for dn, entry in parser.entries:
                backend_location = force if force else self.get_backend_location_for_dn(dn)
                if backend_location == BackendTypes.LDAP:
                    if 'add' in  entry and 'changetype' in entry:
                        base.logIt("LDAP modify add dn:{} entry:{}".format(dn, dict(entry)))
                        change_attr = entry['add'][0]
                        ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_ADD, entry[change_attr])]})
                        self.log_ldap_result(ldap_operation_result)

                    elif 'replace' in  entry and 'changetype' in entry:
                        base.logIt("LDAP modify replace dn:{} entry:{}".format(dn, dict(entry)))
                        change_attr = entry['replace'][0]
                        ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_REPLACE, [entry[change_attr][0]])]})
                        self.log_ldap_result(ldap_operation_result)

                    elif not self.dn_exists(dn):
                        base.logIt("Adding LDAP dn:{} entry:{}".format(dn, dict(entry)))
                        ldap_operation_result = self.ldap_conn.add(dn, attributes=entry)
                        self.log_ldap_result(ldap_operation_result)

                elif backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL):
                    if self.Base is None:
                        self.rdm_automapper()

                    # TODO: inserting data to sub tables to be implemented for mysql and pgsql

                    if 'add' in  entry and 'changetype' in entry:
                        attribute = entry['add'][0]
                        new_val = entry[attribute]
                        sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                        if sqlalchObj:
                            if isinstance(sqlalchObj.__table__.columns[attribute].type, self.json_dialects_instance):
                                cur_val = copy.deepcopy(getattr(sqlalchObj, attribute))
                                for val_ in new_val:
                                    cur_val['v'].append(val_)
                                setattr(sqlalchObj, attribute, cur_val)
                            else:
                                setattr(sqlalchObj, attribute, new_val[0])

                            self.session.commit()

                        else:
                            base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                            continue

                    elif 'replace' in entry and 'changetype' in entry:
                        attribute = entry['replace'][0]
                        new_val = self.get_rdbm_val(attribute, entry[attribute])
                        sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                        if sqlalchObj:
                            setattr(sqlalchObj, attribute, new_val)
                            self.session.commit()
                        else:
                            base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                            continue

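                    # Plain entry (no changetype): build a new row keyed by the RDN value as doc_id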
                    else:
                        vals = {}
                        dn_parsed = dnutils.parse_dn(dn)
                        rdn_name = dn_parsed[0][0]
                        objectClass = self.get_clean_objcet_class(entry)
                        if objectClass.lower() == 'organizationalunit':
                            continue

                        vals['doc_id'] = dn_parsed[0][1]
                        vals['dn'] = dn
                        vals['objectClass'] = objectClass

                        #entry.pop(rdn_name)
                        if 'objectClass' in entry:
                            entry.pop('objectClass')
                        elif 'objectclass' in entry:
                            entry.pop('objectclass')

                        table_name = objectClass

                        if self.dn_exists_rdbm(dn, table_name):
                            base.logIt("DN {} exists in {}, skipping".format(dn, Config.rdbm_type))
                            continue

                        for lkey in entry:
                            vals[lkey] = self.get_rdbm_val(lkey, entry[lkey])

                        sqlalchCls = self.Base.classes[table_name]

                        for col in sqlalchCls.__table__.columns:
                            if isinstance(col.type, self.json_dialects_instance) and not col.name in vals:
                                vals[col.name] = {'v': []}

                        sqlalchObj = sqlalchCls()

                        for v in vals:
                            setattr(sqlalchObj, v, vals[v])

                        base.logIt("Adding {}".format(sqlalchObj.doc_id))
                        self.session.add(sqlalchObj)
                        self.session.commit()


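                # Spanner: apply add/replace changes or insert a new row; attributes mapped to sub-tables get one row per value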
                elif backend_location == BackendTypes.SPANNER:

                    if 'add' in  entry and 'changetype' in entry:
                        table = self.get_spanner_table_for_dn(dn)
                        doc_id = self.get_doc_id_from_dn(dn)
                        change_attr = entry['add'][0]
                        if table:
                            doc_id = self.get_doc_id_from_dn(dn)

                            if self.in_subtable(table, change_attr):
                                sub_table = '{}_{}'.format(table, change_attr)
                                for subval in entry[change_attr]:
                                    typed_val = self.get_rdbm_val(change_attr, subval, rdbm_type='spanner')
                                    dict_doc_id = self.get_sha_digest(typed_val)
                                    self.spanner.insert_data(table=sub_table, columns=['doc_id', 'dict_doc_id', change_attr], values=[[doc_id, typed_val, typed_val]])

                            else:
                                data = self.spanner.exec_sql('SELECT {} FROM {} WHERE doc_id="{}"'.format(entry['add'][0], table, doc_id))
                                if data.get('rows'):
                                    cur_data = []

                                    if 'rows' in data and data['rows'] and data['rows'][0] and data['rows'][0][0]:
                                        cur_data = data['rows'][0][0]
                                    
                                    for cur_val in entry[change_attr]:
                                        typed_val = self.get_rdbm_val(change_attr, cur_val, rdbm_type='spanner')
                                        cur_data.append(typed_val)

                                self.spanner.update_data(table=table, columns=['doc_id', change_attr], values=[[doc_id, cur_data]])

                    elif 'replace' in entry and 'changetype' in entry:
                        table = self.get_spanner_table_for_dn(dn)
                        doc_id = self.get_doc_id_from_dn(dn)
                        replace_attr = entry['replace'][0]
                        typed_val = self.get_rdbm_val(replace_attr, entry[replace_attr], rdbm_type='spanner')

                        if self.in_subtable(table, replace_attr):
                            sub_table = '{}_{}'.format(table, replace_attr)
                            # TODO: how to replace ?
                            #for subval in typed_val:
                            #    self.spanner.update_data(table=sub_table, columns=['doc_id', replace_attr], values=[[doc_id, subval]])
                        else:
                            self.spanner.update_data(table=table, columns=['doc_id', replace_attr], values=[[doc_id, typed_val]])

                    else:
                        vals = {}
                        dn_parsed = dnutils.parse_dn(dn)
                        rdn_name = dn_parsed[0][0]
                        objectClass = self.get_clean_objcet_class(entry)
                        if objectClass.lower() == 'organizationalunit':
                            continue

                        doc_id = dn_parsed[0][1]
                        vals['doc_id'] = doc_id
                        vals['dn'] = dn
                        vals['objectClass'] = objectClass

                        if 'objectClass' in entry:
                            entry.pop('objectClass')
                        elif 'objectclass' in entry:
                            entry.pop('objectclass')

                        table_name = objectClass

                        subtable_data = []

                        for lkey in entry:
                            spanner_vals = self.get_rdbm_val(lkey, entry[lkey], rdbm_type='spanner')
                            if not self.in_subtable(table_name, lkey):
                                vals[lkey] = spanner_vals
                            else:
                                sub_table = '{}_{}'.format(table_name, lkey)
                                sub_table_columns = ['doc_id', 'dict_doc_id', lkey]
                                sub_table_values = []
                                for subtableval in spanner_vals:
                                    dict_doc_id = self.get_sha_digest(subtableval)
                                    sub_table_values.append([doc_id, dict_doc_id, subtableval])
                                subtable_data.append((sub_table, sub_table_columns, sub_table_values))

                        columns = [ *vals.keys() ]
                        values = [ vals[lkey] for lkey in columns ]

                        self.spanner.insert_data(table=table_name, columns=columns, values=[values])

                        for sdata in subtable_data:
                            self.spanner.insert_data(table=sdata[0], columns=sdata[1], values=sdata[2])

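                # Couchbase: convert the entry to a JSON document and apply it with N1QL (UPDATE for changetype entries, UPSERT otherwise)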
                elif backend_location == BackendTypes.COUCHBASE:
                    if len(entry) < 3:
                        continue
                    key, document = ldif_utils.get_document_from_entry(dn, entry)
                    cur_bucket = bucket if bucket else self.get_bucket_for_dn(dn)
                    base.logIt("Adding document {} to Couchbase bucket {}".format(key, cur_bucket))

                    n1ql_list = []

                    if 'changetype' in document:
                        if 'replace' in document:
                            attribute = document['replace']
                            n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(document[attribute])))
                        elif 'add' in document:
                            attribute = document['add']
                            result = self.check_attribute_exists(key, attribute)
                            data = document[attribute]
                            if result:
                                if isinstance(data, list):
                                    for d in data:
                                        n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(d)))
                                else:
                                    n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(data)))
                            else:
                                if attribute in attribDataTypes.listAttributes and not isinstance(data, list):
                                    data = [data]
                                n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(data)))
                    else:
                        for k in document:
                            try:
                                kdata = json.loads(document[k])
                                if isinstance(kdata, dict):
                                    document[k] = kdata
                            except:
                                pass

                        n1ql_list.append('UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (cur_bucket, key, json.dumps(document)))

                    for q in n1ql_list:
                        self.cbm.exec_query(q)
Example #4
    def load_test_data(self):

        if not self.scimInstaller.installed():
            self.logIt("Scim was not installed. Installing")
            Config.installScimServer = True
            self.scimInstaller.start_installation()

        self.encode_test_passwords()

        self.logIt("Rendering test templates")

        Config.templateRenderingDict[
            'config_oxauth_test_ldap'] = '# Not available'
        Config.templateRenderingDict[
            'config_oxauth_test_couchbase'] = '# Not available'

        if self.getMappingType('ldap'):
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'oxauth/server/config-oxauth-test-ldap.properties.nrnd'))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            Config.templateRenderingDict[
                'config_oxauth_test_ldap'] = rendered_text

        if self.getMappingType('couchbase'):
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'oxauth/server/config-oxauth-test-couchbase.properties.nrnd'
                ))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            Config.templateRenderingDict[
                'config_oxauth_test_couchbase'] = rendered_text

        self.render_templates_folder(self.template_base)

        self.logIt("Loading test ldif files")

        if not self.passportInstaller.installed():
            self.passportInstaller.generate_configuration()

        ox_auth_test_ldif = os.path.join(
            Config.outputFolder, 'test/oxauth/data/oxauth-test-data.ldif')
        ox_auth_test_user_ldif = os.path.join(
            Config.outputFolder, 'test/oxauth/data/oxauth-test-data-user.ldif')

        scim_test_ldif = os.path.join(
            Config.outputFolder, 'test/scim-client/data/scim-test-data.ldif')
        scim_test_user_ldif = os.path.join(
            Config.outputFolder,
            'test/scim-client/data/scim-test-data-user.ldif')

        ldif_files = (ox_auth_test_ldif, scim_test_ldif,
                      ox_auth_test_user_ldif, scim_test_user_ldif)
        self.dbUtils.import_ldif(ldif_files)

        apache_user = '******' if base.clone_type == 'deb' else 'apache'

        # Client keys deployment
        base.download(
            'https://raw.githubusercontent.com/GluuFederation/oxAuth/master/Client/src/test/resources/oxauth_test_client_keys.zip',
            '/var/www/html/oxauth_test_client_keys.zip')
        self.run([
            paths.cmd_unzip, '-o', '/var/www/html/oxauth_test_client_keys.zip',
            '-d', '/var/www/html/'
        ])
        self.run([paths.cmd_rm, '-rf', 'oxauth_test_client_keys.zip'])
        self.run([
            paths.cmd_chown, '-R', 'root:' + apache_user,
            '/var/www/html/oxauth-client'
        ])

        oxAuthConfDynamic_changes = {
            'dynamicRegistrationCustomObjectClass':
            'oxAuthClientCustomAttributes',
            'dynamicRegistrationCustomAttributes': [
                "oxAuthTrustedClient", "myCustomAttr1", "myCustomAttr2",
                "oxIncludeClaimsInIdToken"
            ],
            'dynamicRegistrationExpirationTime':
            86400,
            'dynamicGrantTypeDefault': [
                "authorization_code", "implicit", "password",
                "client_credentials", "refresh_token",
                "urn:ietf:params:oauth:grant-type:uma-ticket"
            ],
            'legacyIdTokenClaims':
            True,
            'authenticationFiltersEnabled':
            True,
            'clientAuthenticationFiltersEnabled':
            True,
            'keyRegenerationEnabled':
            True,
            'openidScopeBackwardCompatibility':
            False,
        }

        custom_scripts = ('2DAF-F995', '2DAF-F996', '4BBE-C6A8')

        self.dbUtils.set_oxAuthConfDynamic(oxAuthConfDynamic_changes)

        # Enable custom scripts
        for inum in custom_scripts:
            self.dbUtils.enable_script(inum)

        if self.dbUtils.moddb == static.BackendTypes.LDAP:
            # Update LDAP schema
            openDjSchemaFolder = os.path.join(Config.ldapBaseFolder,
                                              'config/schema/')
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/oxauth/schema/102-oxauth_test.ldif'),
                openDjSchemaFolder)
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/scim-client/schema/103-scim_test.ldif'),
                openDjSchemaFolder)

            schema_fn = os.path.join(openDjSchemaFolder,
                                     '77-customAttributes.ldif')

            obcl_parser = myLdifParser(schema_fn)
            obcl_parser.parse()

            for i, o in enumerate(obcl_parser.entries[0][1]['objectClasses']):
                objcl = ObjectClass(o)
                if 'gluuCustomPerson' in objcl.tokens['NAME']:
                    may_list = list(objcl.tokens['MAY'])
                    for a in ('scimCustomFirst', 'scimCustomSecond',
                              'scimCustomThird'):
                        if not a in may_list:
                            may_list.append(a)

                    objcl.tokens['MAY'] = tuple(may_list)
                    obcl_parser.entries[0][1]['objectClasses'][
                        i] = objcl.getstr()

            tmp_fn = '/tmp/77-customAttributes.ldif'
            with open(tmp_fn, 'wb') as w:
                ldif_writer = LDIFWriter(w)
                for dn, entry in obcl_parser.entries:
                    ldif_writer.unparse(dn, entry)

            self.copyFile(tmp_fn, openDjSchemaFolder)
            cwd = os.path.join(Config.ldapBaseFolder, 'bin')
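            # Reconfigure the LDAPS connection handler via dsconfig so OpenDJ listens on all interfaces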
            dsconfigCmd = (
                '{} --trustAll --no-prompt --hostname {} --port {} '
                '--bindDN "{}" --bindPasswordFile /home/ldap/.pw set-connection-handler-prop '
                '--handler-name "LDAPS Connection Handler" --set listen-address:0.0.0.0'
            ).format(os.path.join(Config.ldapBaseFolder,
                                  'bin/dsconfig'), Config.ldap_hostname,
                     Config.ldap_admin_port, Config.ldap_binddn)

            self.run(['/bin/su', 'ldap', '-c', dsconfigCmd], cwd=cwd)

            self.dbUtils.ldap_conn.unbind()

            self.restart('opendj')
            # wait 10 seconds for opendj to start
            time.sleep(10)

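            # Create an equality index for each custom test attribute using dsconfig's create-backend-index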
            for atr in ('myCustomAttr1', 'myCustomAttr2'):
                cmd = (
                    'create-backend-index --backend-name userRoot --type generic '
                    '--index-name {} --set index-type:equality --set index-entry-limit:4000 '
                    '--hostName {} --port {} --bindDN "{}" -j /home/ldap/.pw '
                    '--trustAll --noPropertiesFile --no-prompt').format(
                        atr, Config.ldap_hostname, Config.ldap_admin_port,
                        Config.ldap_binddn)

                dsconfigCmd = '{1} {2}'.format(Config.ldapBaseFolder,
                                               os.path.join(cwd, 'dsconfig'),
                                               cmd)
                self.run(['/bin/su', 'ldap', '-c', dsconfigCmd], cwd=cwd)

        else:
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_gluu_myCustomAttr1 ON `gluu`(myCustomAttr1) USING GSI WITH {"defer_build":true}'
            )
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_gluu_myCustomAttr2 ON `gluu`(myCustomAttr2) USING GSI WITH {"defer_build":true}'
            )
            self.dbUtils.cbm.exec_query(
                'BUILD INDEX ON `gluu` (def_gluu_myCustomAttr1, def_gluu_myCustomAttr2)'
            )

        self.dbUtils.ldap_conn.bind()

        result = self.dbUtils.search('ou=configuration,o=gluu',
                                     search_filter='(oxIDPAuthentication=*)',
                                     search_scope=ldap3.BASE)

        oxIDPAuthentication = json.loads(result['oxIDPAuthentication'])
        oxIDPAuthentication['config']['servers'] = [
            '{0}:{1}'.format(Config.hostname, Config.ldaps_port)
        ]
        oxIDPAuthentication_js = json.dumps(oxIDPAuthentication, indent=2)
        self.dbUtils.set_configuration('oxIDPAuthentication',
                                       oxIDPAuthentication_js)

        self.create_test_client_keystore()

        # Disable token binding module
        if base.os_name in ('ubuntu18', 'ubuntu20'):
            self.run(['a2dismod', 'mod_token_binding'])
            self.restart('apache2')

        self.restart('oxauth')
Example #5
    def load_test_data(self):

        if not self.scimInstaller.installed():
            self.logIt("Scim was not installed. Installing")
            Config.installScimServer = True
            self.scimInstaller.start_installation()

        self.encode_test_passwords()

        self.logIt("Rendering test templates")

        Config.templateRenderingDict[
            'config_oxauth_test_ldap'] = '# Not available'
        Config.templateRenderingDict[
            'config_oxauth_test_couchbase'] = '# Not available'

        config_oxauth_test_properties = self.fomatWithDict(
            'server.name=%(hostname)s\nconfig.oxauth.issuer=http://localhost:80\nconfig.oxauth.contextPath=http://localhost:80\nconfig.oxauth.salt=%(encode_salt)s\nconfig.persistence.type=%(persistence_type)s\n\n',
            self.merge_dicts(Config.__dict__, Config.templateRenderingDict))

        if self.getMappingType('ldap'):
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'oxauth/server/config-oxauth-test-ldap.properties.nrnd'))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            config_oxauth_test_properties += '#ldap\n' + rendered_text

        if self.getMappingType('couchbase'):
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'oxauth/server/config-oxauth-test-couchbase.properties.nrnd'
                ))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            config_oxauth_test_properties += '\n#couchbase\n' + rendered_text

        self.writeFile(
            os.path.join(Config.outputFolder,
                         'test/oxauth/server/config-oxauth-test.properties'),
            config_oxauth_test_properties)

        self.render_templates_folder(self.template_base)

        self.logIt("Loading test ldif files")

        ox_auth_test_ldif = os.path.join(
            Config.outputFolder, 'test/oxauth/data/oxauth-test-data.ldif')
        ox_auth_test_user_ldif = os.path.join(
            Config.outputFolder, 'test/oxauth/data/oxauth-test-data-user.ldif')

        scim_test_ldif = os.path.join(
            Config.outputFolder, 'test/scim-client/data/scim-test-data.ldif')
        scim_test_user_ldif = os.path.join(
            Config.outputFolder,
            'test/scim-client/data/scim-test-data-user.ldif')

        ldif_files = (ox_auth_test_ldif, scim_test_ldif,
                      ox_auth_test_user_ldif, scim_test_user_ldif)
        self.dbUtils.import_ldif(ldif_files)

        apache_user = '******' if base.clone_type == 'deb' else 'apache'

        # Client keys deployment
        base.download(
            'https://raw.githubusercontent.com/JanssenProject/jans-auth-server/master/client/src/test/resources/oxauth_test_client_keys.zip',
            '/var/www/html/oxauth_test_client_keys.zip')
        self.run([
            paths.cmd_unzip, '-o', '/var/www/html/oxauth_test_client_keys.zip',
            '-d', '/var/www/html/'
        ])
        self.run([paths.cmd_rm, '-rf', 'oxauth_test_client_keys.zip'])
        self.run([
            paths.cmd_chown, '-R', 'root:' + apache_user,
            '/var/www/html/oxauth-client'
        ])

        oxAuthConfDynamic_changes = {
            'dynamicRegistrationCustomObjectClass':
            'oxAuthClientCustomAttributes',
            'dynamicRegistrationCustomAttributes': [
                "oxAuthTrustedClient", "myCustomAttr1", "myCustomAttr2",
                "oxIncludeClaimsInIdToken"
            ],
            'dynamicRegistrationExpirationTime':
            86400,
            'dynamicGrantTypeDefault': [
                "authorization_code", "implicit", "password",
                "client_credentials", "refresh_token",
                "urn:ietf:params:oauth:grant-type:uma-ticket"
            ],
            'legacyIdTokenClaims':
            True,
            'authenticationFiltersEnabled':
            True,
            'clientAuthenticationFiltersEnabled':
            True,
            'keyRegenerationEnabled':
            True,
            'openidScopeBackwardCompatibility':
            False,
        }

        custom_scripts = ('2DAF-F995', '2DAF-F996', '4BBE-C6A8')

        self.dbUtils.set_oxAuthConfDynamic(oxAuthConfDynamic_changes)

        # Enable custom scripts
        for inum in custom_scripts:
            self.dbUtils.enable_script(inum)

        if self.dbUtils.moddb == static.BackendTypes.LDAP:
            # Update LDAP schema
            openDjSchemaFolder = os.path.join(Config.ldapBaseFolder,
                                              'config/schema/')
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/oxauth/schema/102-oxauth_test.ldif'),
                openDjSchemaFolder)
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/scim-client/schema/103-scim_test.ldif'),
                openDjSchemaFolder)

            schema_fn = os.path.join(openDjSchemaFolder,
                                     '77-customAttributes.ldif')

            obcl_parser = myLdifParser(schema_fn)
            obcl_parser.parse()

            for i, o in enumerate(obcl_parser.entries[0][1]['objectClasses']):
                objcl = ObjectClass(o)
                if 'gluuCustomPerson' in objcl.tokens['NAME']:
                    may_list = list(objcl.tokens['MAY'])
                    for a in ('scimCustomFirst', 'scimCustomSecond',
                              'scimCustomThird'):
                        if not a in may_list:
                            may_list.append(a)

                    objcl.tokens['MAY'] = tuple(may_list)
                    obcl_parser.entries[0][1]['objectClasses'][
                        i] = objcl.getstr()

            tmp_fn = '/tmp/77-customAttributes.ldif'
            with open(tmp_fn, 'wb') as w:
                ldif_writer = LDIFWriter(w)
                for dn, entry in obcl_parser.entries:
                    ldif_writer.unparse(dn, entry)

            self.copyFile(tmp_fn, openDjSchemaFolder)

            self.logIt("Making opendj listen on all interfaces")
            ldap_operation_result = self.dbUtils.ldap_conn.modify(
                'cn=LDAPS Connection Handler,cn=Connection Handlers,cn=config',
                {'ds-cfg-listen-address': [ldap3.MODIFY_REPLACE, '0.0.0.0']})

            if not ldap_operation_result:
                self.logIt("Ldap modify operation failed {}".format(
                    str(self.dbUtils.ldap_conn.result)))
                self.logIt(
                    "Ldap modify operation failed {}".format(
                        str(self.dbUtils.ldap_conn.result)), True)

            self.dbUtils.ldap_conn.unbind()

            self.logIt("Re-starting opendj")
            self.restart('opendj')

            self.logIt("Re-binding opendj")
            # try 5 times to re-bind opendj
            for i in range(5):
                time.sleep(5)
                self.logIt("Try binding {} ...".format(i + 1))
                bind_result = self.dbUtils.ldap_conn.bind()
                if bind_result:
                    self.logIt("Binding to opendj was successful")
                    break
                self.logIt("Re-try in 5 seconds")
            else:
                self.logIt("Re-binding opendj FAILED")
                sys.exit("Re-binding opendj FAILED")

            for atr in ('myCustomAttr1', 'myCustomAttr2'):

                dn = 'ds-cfg-attribute={},cn=Index,ds-cfg-backend-id={},cn=Backends,cn=config'.format(
                    atr, 'userRoot')
                entry = {
                    'objectClass': ['top', 'ds-cfg-backend-index'],
                    'ds-cfg-attribute': [atr],
                    'ds-cfg-index-type': ['equality'],
                    'ds-cfg-index-entry-limit': ['4000']
                }
                self.logIt("Creating Index {}".format(dn))
                ldap_operation_result = self.dbUtils.ldap_conn.add(
                    dn, attributes=entry)
                if not ldap_operation_result:
                    self.logIt("Ldap modify operation failed {}".format(
                        str(self.dbUtils.ldap_conn.result)))
                    self.logIt(
                        "Ldap modify operation failed {}".format(
                            str(self.dbUtils.ldap_conn.result)), True)

        else:
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_gluu_myCustomAttr1 ON `gluu`(myCustomAttr1) USING GSI WITH {"defer_build":true}'
            )
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_gluu_myCustomAttr2 ON `gluu`(myCustomAttr2) USING GSI WITH {"defer_build":true}'
            )
            self.dbUtils.cbm.exec_query(
                'BUILD INDEX ON `gluu` (def_gluu_myCustomAttr1, def_gluu_myCustomAttr2)'
            )

        self.dbUtils.ldap_conn.bind()

        result = self.dbUtils.search('ou=configuration,o=jans',
                                     search_filter='(jansIDPAuthn=*)',
                                     search_scope=ldap3.BASE)

        oxIDPAuthentication = json.loads(result['jansIDPAuthn'])
        oxIDPAuthentication['config']['servers'] = [
            '{0}:{1}'.format(Config.hostname, Config.ldaps_port)
        ]
        oxIDPAuthentication_js = json.dumps(oxIDPAuthentication, indent=2)
        self.dbUtils.set_configuration('jansIDPAuthn', oxIDPAuthentication_js)

        self.create_test_client_keystore()

        # Disable token binding module
        if base.os_name in ('ubuntu18', 'ubuntu20'):
            self.run(['a2dismod', 'mod_token_binding'])
            self.restart('apache2')

        self.restart('jans-auth')
Example #6
    def import_ldif(self, ldif_files, bucket=None, force=None):

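        # Simpler importer variant: only LDAP and Couchbase backends are handled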
        for ldif_fn in ldif_files:
            parser = ldif_utils.myLdifParser(ldif_fn)
            parser.parse()

            for dn, entry in parser.entries:
                backend_location = force if force else self.get_backend_location_for_dn(
                    dn)
                if backend_location == BackendTypes.LDAP:
                    if not self.dn_exists(dn):
                        base.logIt("Adding LDAP dn:{} entry:{}".format(
                            dn, dict(entry)))
                        self.ldap_conn.add(dn, attributes=entry)

                elif backend_location == BackendTypes.COUCHBASE:
                    if len(entry) < 3:
                        continue
                    key, document = ldif_utils.get_document_from_entry(
                        dn, entry)
                    cur_bucket = bucket if bucket else self.get_bucket_for_dn(
                        dn)
                    base.logIt(
                        "Adding document {} to Couchbase bucket {}".format(
                            key, cur_bucket))

                    n1ql_list = []

                    if 'changetype' in document:
                        if 'replace' in document:
                            attribute = document['replace']
                            n1ql_list.append(
                                'UPDATE `%s` USE KEYS "%s" SET `%s`=%s' %
                                (cur_bucket, key, attribute,
                                 json.dumps(document[attribute])))
                        elif 'add' in document:
                            attribute = document['add']
                            result = self.check_attribute_exists(
                                key, attribute)
                            data = document[attribute]
                            if result:
                                if isinstance(data, list):
                                    for d in data:
                                        n1ql_list.append(
                                            'UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)'
                                            % (cur_bucket, key, attribute,
                                               attribute, json.dumps(d)))
                                else:
                                    n1ql_list.append(
                                        'UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)'
                                        % (cur_bucket, key, attribute,
                                           attribute, json.dumps(data)))
                            else:
                                if attribute in attribDataTypes.listAttributes and not isinstance(
                                        data, list):
                                    data = [data]
                                n1ql_list.append(
                                    'UPDATE `%s` USE KEYS "%s" SET `%s`=%s' %
                                    (cur_bucket, key, attribute,
                                     json.dumps(data)))
                    else:
                        n1ql_list.append(
                            'UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' %
                            (cur_bucket, key, json.dumps(document)))

                    for q in n1ql_list:
                        self.cbm.exec_query(q)
Example #7
    def import_ldif(self, ldif_files, bucket=None, force=None):

        base.logIt("Importing ldif file(s): {} ".format(', '.join(ldif_files)))

        sql_data_fn = os.path.join(Config.outputFolder, Config.rdbm_type, 'jans_data.sql')

        for ldif_fn in ldif_files:
            base.logIt("Importing entries from " + ldif_fn)
            parser = ldif_utils.myLdifParser(ldif_fn)
            parser.parse()

            for dn, entry in parser.entries:
                backend_location = force if force else self.get_backend_location_for_dn(dn)
                if backend_location == BackendTypes.LDAP:
                    if 'add' in  entry and 'changetype' in entry:
                        base.logIt("LDAP modify add dn:{} entry:{}".format(dn, dict(entry)))
                        change_attr = entry['add'][0]
                        ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_ADD, entry[change_attr])]})
                        self.log_ldap_result(ldap_operation_result)

                    elif 'replace' in  entry and 'changetype' in entry:
                        base.logIt("LDAP modify replace dn:{} entry:{}".format(dn, dict(entry)))
                        change_attr = entry['replace'][0]
                        ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_REPLACE, [entry[change_attr][0]])]})
                        self.log_ldap_result(ldap_operation_result)

                    elif not self.dn_exists(dn):
                        base.logIt("Adding LDAP dn:{} entry:{}".format(dn, dict(entry)))
                        ldap_operation_result = self.ldap_conn.add(dn, attributes=entry)
                        self.log_ldap_result(ldap_operation_result)

                elif backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL):
                    if self.Base is None:
                        self.rdm_automapper()

                    if 'add' in  entry and 'changetype' in entry:
                        attribute = entry['add'][0]
                        new_val = entry[attribute]
                        sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                        if sqlalchObj:
                            if isinstance(sqlalchObj.__table__.columns[attribute].type, self.json_dialects_instance):
                                cur_val = copy.deepcopy(getattr(sqlalchObj, attribute))
                                for val_ in new_val:
                                    cur_val['v'].append(val_)
                                setattr(sqlalchObj, attribute, cur_val)
                            else:
                                setattr(sqlalchObj, attribute, new_val[0])

                            self.session.commit()

                        else:
                            base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                            continue

                    elif 'replace' in entry and 'changetype' in entry:
                        attribute = entry['replace'][0]
                        new_val = self.get_rdbm_val(attribute, entry[attribute])
                        sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                        if sqlalchObj:
                            setattr(sqlalchObj, attribute, new_val)
                            self.session.commit()
                        else:
                            base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                            continue

                    else:
                        vals = {}
                        dn_parsed = dnutils.parse_dn(dn)
                        rdn_name = dn_parsed[0][0]
                        objectClass = entry.get('objectClass') or entry.get('objectclass')

                        if objectClass:
                            if 'top' in objectClass:
                                objectClass.remove('top')
                            if  len(objectClass) == 1 and objectClass[0].lower() == 'organizationalunit':
                                continue
                            objectClass = objectClass[-1]

                        vals['doc_id'] = dn_parsed[0][1]
                        vals['dn'] = dn
                        vals['objectClass'] = objectClass

                        #entry.pop(rdn_name)
                        if 'objectClass' in entry:
                            entry.pop('objectClass')
                        elif 'objectclass' in entry:
                            entry.pop('objectclass')

                        table_name = objectClass

                        if self.dn_exists_rdbm(dn, table_name):
                            base.logIt("DN {} exists in {}, skipping".format(dn, Config.rdbm_type))
                            continue

                        for lkey in entry:
                            vals[lkey] = self.get_rdbm_val(lkey, entry[lkey])

                        sqlalchCls = self.Base.classes[table_name]

                        for col in sqlalchCls.__table__.columns:
                            if isinstance(col.type, self.json_dialects_instance) and not col.name in vals:
                                vals[col.name] = {'v': []}

                        sqlalchObj = sqlalchCls()

                        for v in vals:
                            setattr(sqlalchObj, v, vals[v])

                        base.logIt("Adding {}".format(sqlalchObj.doc_id))
                        self.session.add(sqlalchObj)
                        self.session.commit()


                elif backend_location == BackendTypes.COUCHBASE:
                    if len(entry) < 3:
                        continue
                    key, document = ldif_utils.get_document_from_entry(dn, entry)
                    cur_bucket = bucket if bucket else self.get_bucket_for_dn(dn)
                    base.logIt("Adding document {} to Couchbase bucket {}".format(key, cur_bucket))

                    n1ql_list = []

                    if 'changetype' in document:
                        if 'replace' in document:
                            attribute = document['replace']
                            n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(document[attribute])))
                        elif 'add' in document:
                            attribute = document['add']
                            result = self.check_attribute_exists(key, attribute)
                            data = document[attribute]
                            if result:
                                if isinstance(data, list):
                                    for d in data:
                                        n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(d)))
                                else:
                                    n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(data)))
                            else:
                                if attribute in attribDataTypes.listAttributes and not isinstance(data, list):
                                    data = [data]
                                n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(data)))
                    else:
                        for k in document:
                            try:
                                kdata = json.loads(document[k])
                                if isinstance(kdata, dict):
                                    document[k] = kdata
                            except:
                                pass

                        n1ql_list.append('UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (cur_bucket, key, json.dumps(document)))

                    for q in n1ql_list:
                        self.cbm.exec_query(q)