def dn_exists(self, dn):
    mapping_location = self.get_backend_location_for_dn(dn)

    if mapping_location in (BackendTypes.MYSQL, BackendTypes.PGSQL):
        base.logIt("Querying RDBM for dn {}".format(dn))
        result = self.get_sqlalchObj_for_dn(dn)
        if result:
            return result.__dict__
        return

    elif mapping_location == BackendTypes.LDAP:
        base.logIt("Querying LDAP for dn {}".format(dn))
        result = self.ldap_conn.search(search_base=dn, search_filter='(objectClass=*)', search_scope=ldap3.BASE, attributes=['*'])
        if result:
            key, document = ldif_utils.get_document_from_entry(self.ldap_conn.response[0]['dn'], self.ldap_conn.response[0]['attributes'])
            return document

    else:
        bucket = self.get_bucket_for_dn(dn)
        key = ldif_utils.get_key_from(dn)
        n1ql = 'SELECT * FROM `{}` USE KEYS "{}"'.format(bucket, key)
        result = self.cbm.exec_query(n1ql)
        if result.ok:
            data = result.json()
            if data.get('results'):
                return data['results'][0][bucket]
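# Usage sketch (hypothetical caller; assumes a bound DBUtils instance named
# `dbUtils`): dn_exists() returns the stored entry as a dict when the dn is found
# in whichever backend the dn maps to, and None otherwise.
#
#   entry = dbUtils.dn_exists('ou=configuration,o=jans')
#   if entry:
#       print(entry.get('objectClass'))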
def bind(self, use_ssl=True):
    if Config.mappingLocations['default'] == 'ldap':
        self.moddb = BackendTypes.LDAP
    else:
        self.moddb = BackendTypes.COUCHBASE

    if not hasattr(self, 'ldap_conn'):
        for group in Config.mappingLocations:
            if Config.mappingLocations[group] == 'ldap':
                ldap_server = ldap3.Server(Config.ldap_hostname, port=int(Config.ldaps_port), use_ssl=use_ssl)
                self.ldap_conn = ldap3.Connection(
                            ldap_server,
                            user=Config.ldap_binddn,
                            password=Config.ldapPass,
                            )
                base.logIt("Making LDAP Connection to host {}:{} with user {}".format(Config.ldap_hostname, Config.ldaps_port, Config.ldap_binddn))
                self.ldap_conn.bind()
                break

    self.set_cbm()
    self.default_bucket = Config.couchbase_bucket_prefix
def log_ldap_result(self, ldap_operation_result):
    if not ldap_operation_result:
        # log the failure twice: once to the setup log and once to the error log
        base.logIt("Ldap modify operation failed {}".format(str(self.ldap_conn.result)))
        base.logIt("Ldap modify operation failed {}".format(str(self.ldap_conn.result)), True)
def logIt(self, *args, **kwargs):
    # if 'pbar' is in kwargs, also pass the message to the progress bar
    if 'pbar' in kwargs:
        ptype = kwargs.pop('pbar')
        msg = kwargs['msg'] if 'msg' in kwargs else args[0]
        Config.pbar.progress(ptype, msg)
    base.logIt(*args, **kwargs)
def exec_query(self, query):
    logIt("Executing n1ql {}".format(query))
    data = {'statement': query}
    result = requests.post(self.n1ql_api, data=data, auth=self.auth, verify=False)
    self.logIfError(result)
    return result
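# Usage sketch (hypothetical; bucket name assumed): exec_query() POSTs the N1QL
# statement to the Couchbase query REST endpoint and returns the requests.Response
# object, so callers check result.ok and result.json() themselves.
#
#   result = cbm.exec_query('SELECT * FROM `jans` USE KEYS "configuration"')
#   if result.ok:
#       print(result.json().get('results'))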
def rdm_automapper(self, force=False):
    if not force and self.Base:
        return

    base.logIt("Reflecting ORM tables")

    self.metadata.reflect(self.engine)
    self.Base = sqlalchemy.ext.automap.automap_base(metadata=self.metadata)
    self.Base.prepare()

    base.logIt("Reflected tables {}".format(list(self.metadata.tables.keys())))
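# Sketch of how the automapped base is consumed elsewhere in this module (table
# name assumed): after rdm_automapper() runs, each reflected table is available
# as a mapped class keyed by table name, e.g.
#
#   client_cls = self.Base.classes['jansClnt']
#   self.session.query(client_cls).filter(client_cls.dn == dn).first()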
def dn_exists_rdbm(self, dn, table):
    base.logIt("Checking dn {} exists in table {}".format(dn, table))
    backend_location = self.get_backend_location_for_dn(dn)

    if backend_location == BackendTypes.SPANNER:
        result = self.spanner.exec_sql('SELECT * FROM {} WHERE dn="{}"'.format(table, dn))
        if result and 'rows' in result and result['rows']:
            return result
        return

    sqlalchemy_table = self.Base.classes[table].__table__
    return self.session.query(sqlalchemy_table).filter(sqlalchemy_table.columns.dn == dn).first()
def installPackage(self, packageName, remote=False):
    base.logIt("Installing " + packageName)
    install_command, update_command, query_command, check_text = self.get_install_commands()

    if remote:
        output = self.run(install_command.format(packageName), shell=True)
    else:
        if base.clone_type == 'deb':
            output = self.run([paths.cmd_dpkg, '--install', packageName])
        else:
            output = self.run([paths.cmd_rpm, '--install', '--verbose', '--hash', packageName])

    return output
def import_schema(self, schema_file):
    if self.moddb == BackendTypes.LDAP:
        base.logIt("Importing schema {}".format(schema_file))

        parser = ldif_utils.myLdifParser(schema_file)
        parser.parse()

        for dn, entry in parser.entries:
            if 'changetype' in entry:
                entry.pop('changetype')
            if 'add' in entry:
                entry.pop('add')
            for entry_type in entry:
                for e in entry[entry_type]:
                    base.logIt("Adding to schema, type: {} value: {}".format(entry_type, e))
                    ldap_operation_result = self.ldap_conn.modify(dn, {entry_type: [ldap3.MODIFY_ADD, e]})
                    self.log_ldap_result(ldap_operation_result)

        # we need to re-bind after schema operations
        self.ldap_conn.rebind()
def exec_sql(self, cmd):
    base.logIt("Executing SQL query: {}".format(cmd))
    ses = self.get_session()
    data = {'fields': [], 'rows': []}

    with ses.transaction() as tr:
        try:
            result = tr.execute_sql(cmd)
            data['rows'] = list(result)
            for f in result.fields:
                data['fields'].append({'name': f.name, 'type': f.type_.code.name})
        except Exception as e:
            # don't abort setup on a failed query, but leave a trace in the error log
            base.logIt("Spanner query failed: {}".format(e), True)

    return data
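# Shape of the dict returned above (values illustrative):
#
#   {'fields': [{'name': 'doc_id', 'type': 'STRING'}, {'name': 'jansNum', 'type': 'INT64'}],
#    'rows': [['configuration', '42']]}
#
# Callers such as search() use the 'type' entries to cast INT64 values back to int.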
def bind(self, use_ssl=True, force=False):
    setattr(base.current_app, self.__class__.__name__, self)
    base.logIt("Bind to database")

    if Config.mappingLocations['default'] == 'ldap':
        self.moddb = BackendTypes.LDAP
    elif Config.mappingLocations['default'] == 'rdbm':
        self.read_jans_schema()
        if Config.rdbm_type == 'mysql':
            self.moddb = BackendTypes.MYSQL
        elif Config.rdbm_type == 'pgsql':
            self.moddb = BackendTypes.PGSQL
        elif Config.rdbm_type == 'spanner':
            self.moddb = BackendTypes.SPANNER
            self.spanner = Spanner()
    else:
        self.moddb = BackendTypes.COUCHBASE

    if not hasattr(self, 'ldap_conn') or force:
        for group in Config.mappingLocations:
            if Config.mappingLocations[group] == 'ldap':
                base.logIt("Making LDAP Connection")
                ldap_server = ldap3.Server(Config.ldap_hostname, port=int(Config.ldaps_port), use_ssl=use_ssl)
                self.ldap_conn = ldap3.Connection(
                            ldap_server,
                            user=Config.ldap_binddn,
                            password=Config.ldapPass,
                            )
                base.logIt("Making LDAP Connection to host {}:{} with user {}".format(Config.ldap_hostname, Config.ldaps_port, Config.ldap_binddn))
                self.ldap_conn.bind()
                break

    if not self.session or force:
        for group in Config.mappingLocations:
            if Config.mappingLocations[group] == 'rdbm':
                if Config.rdbm_type in ('mysql', 'pgsql'):
                    base.logIt("Making {} Connection".format(Config.rdbm_type.upper()))
                    result = self.sqlconnection()
                    if not result[0]:
                        print("{}FATAL: {}{}".format(colors.FAIL, result[1], colors.ENDC))
                    break

    self.set_cbm()
    self.default_bucket = Config.couchbase_bucket_prefix
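# Assumed (illustrative) shape of Config.mappingLocations driving the branches
# above: a dict mapping each mapping group to a backend name. The 'default' entry
# selects self.moddb; any group mapped to 'ldap' or 'rdbm' triggers the matching
# connection.
#
#   Config.mappingLocations = {'default': 'rdbm', 'user': 'ldap', 'cache': 'rdbm'}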
def search(self, search_base, search_filter='(objectClass=*)', search_scope=ldap3.LEVEL):
    base.logIt("Searching database for dn {} with filter {}".format(search_base, search_filter))
    backend_location = self.get_backend_location_for_dn(search_base)

    if backend_location == BackendTypes.LDAP:
        if self.ldap_conn.search(search_base=search_base, search_filter=search_filter, search_scope=search_scope, attributes=['*']):
            key, document = ldif_utils.get_document_from_entry(self.ldap_conn.response[0]['dn'], self.ldap_conn.response[0]['attributes'])
            return document

    if backend_location == BackendTypes.COUCHBASE:
        key = ldif_utils.get_key_from(search_base)
        bucket = self.get_bucket_for_key(key)

        if search_scope == ldap3.BASE:
            n1ql = 'SELECT * FROM `{}` USE KEYS "{}"'.format(bucket, key)
        else:
            parsed_dn = dnutils.parse_dn(search_filter.strip('(').strip(')'))
            attr = parsed_dn[0][0]
            val = parsed_dn[0][1]
            if '*' in val:
                search_clause = 'LIKE "{}"'.format(val.replace('*', '%'))
            else:
                search_clause = '="{}"'.format(val)
            n1ql = 'SELECT * FROM `{}` WHERE `{}` {}'.format(bucket, attr, search_clause)

        result = self.cbm.exec_query(n1ql)
        if result.ok:
            data = result.json()
            if data.get('results'):
                return data['results'][0][bucket]
def __init__(self):
    if Config.spanner_emulator_host:
        base.logIt("Using spanner emulator at {}".format(Config.spanner_emulator_host))
        self.client = spanner.Client(
                        project=Config.spanner_project,
                        client_options={'api_endpoint': '{}:9010'.format(Config.spanner_emulator_host)},
                        credentials=AnonymousCredentials()
                        )
    else:
        base.logIt("Using spanner with credentials {}".format(Config.google_application_credentials))
        os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = Config.google_application_credentials
        self.client = spanner.Client()

    self.instance = self.client.instance(Config.spanner_instance)
    self.database = self.instance.database(Config.spanner_database)
def sqlconnection(self, log=True):
    base.logIt("Making {} Connection to {}:{}/{} with user {}".format(Config.rdbm_type.upper(), Config.rdbm_host, Config.rdbm_port, Config.rdbm_db, Config.rdbm_user))

    db_str = 'mysql+pymysql' if Config.rdbm_type == 'mysql' else 'postgresql+psycopg2'

    bind_uri = '{}://{}:{}@{}:{}/{}'.format(
                    db_str,
                    Config.rdbm_user,
                    Config.rdbm_password,
                    Config.rdbm_host,
                    Config.rdbm_port,
                    Config.rdbm_db,
            )

    if Config.rdbm_type == 'mysql':
        bind_uri += '?charset=utf8mb4'

    try:
        self.engine = sqlalchemy.create_engine(bind_uri)
        logging.basicConfig(filename=os.path.join(Config.install_dir, 'logs/sqlalchemy.log'))
        logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
        Session = sqlalchemy.orm.sessionmaker(bind=self.engine)
        self.session = Session()
        self.metadata = sqlalchemy.MetaData()
        self.session.connection()
        base.logIt("{} Connection was successful".format(Config.rdbm_type.upper()))
        return True, self.session

    except Exception as e:
        if log:
            base.logIt("Can't connect to {} server: {}".format(Config.rdbm_type.upper(), str(e)), True)
        return False, e
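# For reference, the bind URIs produced above look like this (host, port and
# credentials illustrative):
#
#   mysql+pymysql://jans:secret@localhost:3306/jansdb?charset=utf8mb4
#   postgresql+psycopg2://jans:secret@localhost:5432/jansdb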
def logIfError(self, result):
    try:
        js = result.json()
        if 'errors' in js:
            msg = "Error executing query: {}".format(', '.join([err['msg'] for err in js['errors']]))
            logIt(msg)
            logIt(msg, True)
        else:
            logIt("Query Result: {}".format(str(js)))
    except Exception:
        pass
def exec_rdbm_query(self, query, getresult=False):
    base.logIt("Executing {} Query: {}".format(Config.rdbm_type, query))

    if Config.rdbm_type in ('mysql', 'pgsql'):
        try:
            qresult = self.session.execute(query)
            self.session.commit()
        except Exception as e:
            base.logIt("ERROR executing query {}".format(e.args))
            base.logIt("ERROR executing query {}".format(e.args), True)
        else:
            # note: True == 1 in Python, so getresult=True also returns only the
            # first row; pass any other truthy value to fetch all rows
            if getresult == 1:
                return qresult.first()
            elif getresult:
                return qresult.fetchall()

    elif Config.rdbm_type == 'spanner':
        if query.startswith('CREATE TABLE'):
            self.spanner.create_table(query)
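# Usage sketch (hypothetical calls; table name assumed): getresult=1 returns the
# first row, getresult=2 (or any truthy value other than 1/True) returns all rows.
#
#   row  = dbUtils.exec_rdbm_query('SELECT dn FROM jansClnt', getresult=1)
#   rows = dbUtils.exec_rdbm_query('SELECT dn FROM jansClnt', getresult=2)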
def dn_exists(self, dn):
    base.logIt("Querying LDAP for dn {}".format(dn))
    return self.ldap_conn.search(search_base=dn, search_filter='(objectClass=*)', search_scope=ldap3.BASE, attributes=['*'])
def print_or_log(msg):
    # print to the console if the -x argument was passed, otherwise write to the setup log
    if argsp.x:
        print(msg)
    else:
        base.logIt(msg)
def import_ldif(self, ldif_files, bucket=None, force=None):
    base.logIt("Importing ldif file(s): {}".format(', '.join(ldif_files)))

    sql_data_fn = os.path.join(Config.outputFolder, Config.rdbm_type, 'jans_data.sql')

    for ldif_fn in ldif_files:
        base.logIt("Importing entries from " + ldif_fn)
        parser = ldif_utils.myLdifParser(ldif_fn)
        parser.parse()

        for dn, entry in parser.entries:
            backend_location = force if force else self.get_backend_location_for_dn(dn)

            if backend_location == BackendTypes.LDAP:
                if 'add' in entry and 'changetype' in entry:
                    base.logIt("LDAP modify add dn:{} entry:{}".format(dn, dict(entry)))
                    change_attr = entry['add'][0]
                    ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_ADD, entry[change_attr])]})
                    self.log_ldap_result(ldap_operation_result)

                elif 'replace' in entry and 'changetype' in entry:
                    base.logIt("LDAP modify replace dn:{} entry:{}".format(dn, dict(entry)))
                    change_attr = entry['replace'][0]
                    ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_REPLACE, [entry[change_attr][0]])]})
                    self.log_ldap_result(ldap_operation_result)

                elif not self.dn_exists(dn):
                    base.logIt("Adding LDAP dn:{} entry:{}".format(dn, dict(entry)))
                    ldap_operation_result = self.ldap_conn.add(dn, attributes=entry)
                    self.log_ldap_result(ldap_operation_result)

            elif backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL):
                if self.Base is None:
                    self.rdm_automapper()

                if 'add' in entry and 'changetype' in entry:
                    attribute = entry['add'][0]
                    new_val = entry[attribute]
                    sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                    if sqlalchObj:
                        if isinstance(sqlalchObj.__table__.columns[attribute].type, self.json_dialects_instance):
                            cur_val = copy.deepcopy(getattr(sqlalchObj, attribute))
                            for val_ in new_val:
                                cur_val['v'].append(val_)
                            setattr(sqlalchObj, attribute, cur_val)
                        else:
                            setattr(sqlalchObj, attribute, new_val[0])
                        self.session.commit()
                    else:
                        base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                        continue

                elif 'replace' in entry and 'changetype' in entry:
                    attribute = entry['replace'][0]
                    new_val = self.get_rdbm_val(attribute, entry[attribute])
                    sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                    if sqlalchObj:
                        setattr(sqlalchObj, attribute, new_val)
                        self.session.commit()
                    else:
                        base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                        continue

                else:
                    vals = {}
                    dn_parsed = dnutils.parse_dn(dn)
                    rdn_name = dn_parsed[0][0]
                    objectClass = entry.get('objectClass') or entry.get('objectclass')

                    if objectClass:
                        if 'top' in objectClass:
                            objectClass.remove('top')
                        if len(objectClass) == 1 and objectClass[0].lower() == 'organizationalunit':
                            continue
                        objectClass = objectClass[-1]

                    vals['doc_id'] = dn_parsed[0][1]
                    vals['dn'] = dn
                    vals['objectClass'] = objectClass

                    #entry.pop(rdn_name)
                    if 'objectClass' in entry:
                        entry.pop('objectClass')
                    elif 'objectclass' in entry:
                        entry.pop('objectclass')

                    table_name = objectClass

                    if self.dn_exists_rdbm(dn, table_name):
                        base.logIt("DN {} exists in {}, skipping".format(dn, Config.rdbm_type))
                        continue

                    for lkey in entry:
                        vals[lkey] = self.get_rdbm_val(lkey, entry[lkey])

                    sqlalchCls = self.Base.classes[table_name]

                    for col in sqlalchCls.__table__.columns:
                        if isinstance(col.type, self.json_dialects_instance) and not col.name in vals:
                            vals[col.name] = {'v': []}

                    sqlalchObj = sqlalchCls()

                    for v in vals:
                        setattr(sqlalchObj, v, vals[v])

                    base.logIt("Adding {}".format(sqlalchObj.doc_id))
                    self.session.add(sqlalchObj)
                    self.session.commit()

            elif backend_location == BackendTypes.COUCHBASE:
                if len(entry) < 3:
                    continue

                key, document = ldif_utils.get_document_from_entry(dn, entry)
                cur_bucket = bucket if bucket else self.get_bucket_for_dn(dn)
                base.logIt("Adding document {} to Couchbase bucket {}".format(key, cur_bucket))

                n1ql_list = []

                if 'changetype' in document:
                    if 'replace' in document:
                        attribute = document['replace']
                        n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(document[attribute])))
                    elif 'add' in document:
                        attribute = document['add']
                        result = self.check_attribute_exists(key, attribute)
                        data = document[attribute]
                        if result:
                            if isinstance(data, list):
                                for d in data:
                                    n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(d)))
                            else:
                                n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(data)))
                        else:
                            if attribute in attribDataTypes.listAttributes and not isinstance(data, list):
                                data = [data]
                            n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(data)))
                else:
                    for k in document:
                        try:
                            kdata = json.loads(document[k])
                            if isinstance(kdata, dict):
                                document[k] = kdata
                        except Exception:
                            pass
                    n1ql_list.append('UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (cur_bucket, key, json.dumps(document)))

                for q in n1ql_list:
                    self.cbm.exec_query(q)
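# Illustrative input/output for the Couchbase 'replace' branch above (bucket name
# and key format assumed; exact value typing depends on get_document_from_entry):
# an LDIF modify entry such as
#
#   dn: ou=configuration,o=jans
#   changetype: modify
#   replace: jansStatus
#   jansStatus: active
#
# becomes roughly:
#
#   UPDATE `jans` USE KEYS "configuration" SET `jansStatus`="active"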
jansInstaller = JansInstaller()
jansInstaller.initialize()

print()
print("Installing Janssen Server...\n\nFor more info see:\n {} \n {}\n".format(paths.LOG_FILE, paths.LOG_ERROR_FILE))
print("Detected OS : {} {} {}".format('snap' if base.snap else '', base.os_type, base.os_version))
print("Janssen Version : {}".format(Config.oxVersion))
print("Detected init : {}".format(base.os_initdaemon))
print("Detected Apache : {}".format(base.determineApacheVersion()))
print()

setup_loaded = {}
if setupOptions['setup_properties']:
    base.logIt('%s Properties found!\n' % setupOptions['setup_properties'])
    setup_loaded = propertiesUtils.load_properties(setupOptions['setup_properties'])
elif os.path.isfile(Config.setup_properties_fn):
    base.logIt('%s Properties found!\n' % Config.setup_properties_fn)
    setup_loaded = propertiesUtils.load_properties(Config.setup_properties_fn)
elif os.path.isfile(Config.setup_properties_fn + '.enc'):
    base.logIt('%s Properties found!\n' % (Config.setup_properties_fn + '.enc'))
    setup_loaded = propertiesUtils.load_properties(Config.setup_properties_fn + '.enc')

collectProperties = CollectProperties()
if os.path.exists(Config.jans_properties_fn):
    collectProperties.collect()
    collectProperties.save()
    Config.installed_instance = True
def dn_exists_rdbm(self, dn, table):
    base.logIt("Checking dn {} exists in table {}".format(dn, table))
    sqlalchemy_table = self.Base.classes[table].__table__
    return self.session.query(sqlalchemy_table).filter(sqlalchemy_table.columns.dn == dn).first()
def import_ldif(self, ldif_files, bucket=None, force=None):
    base.logIt("Importing ldif file(s): {}".format(', '.join(ldif_files)))

    sql_data_fn = os.path.join(Config.outputFolder, Config.rdbm_type, 'jans_data.sql')

    for ldif_fn in ldif_files:
        base.logIt("Importing entries from " + ldif_fn)
        parser = ldif_utils.myLdifParser(ldif_fn)
        parser.parse()

        for dn, entry in parser.entries:
            backend_location = force if force else self.get_backend_location_for_dn(dn)

            if backend_location == BackendTypes.LDAP:
                if 'add' in entry and 'changetype' in entry:
                    base.logIt("LDAP modify add dn:{} entry:{}".format(dn, dict(entry)))
                    change_attr = entry['add'][0]
                    ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_ADD, entry[change_attr])]})
                    self.log_ldap_result(ldap_operation_result)

                elif 'replace' in entry and 'changetype' in entry:
                    base.logIt("LDAP modify replace dn:{} entry:{}".format(dn, dict(entry)))
                    change_attr = entry['replace'][0]
                    ldap_operation_result = self.ldap_conn.modify(dn, {change_attr: [(ldap3.MODIFY_REPLACE, [entry[change_attr][0]])]})
                    self.log_ldap_result(ldap_operation_result)

                elif not self.dn_exists(dn):
                    base.logIt("Adding LDAP dn:{} entry:{}".format(dn, dict(entry)))
                    ldap_operation_result = self.ldap_conn.add(dn, attributes=entry)
                    self.log_ldap_result(ldap_operation_result)

            elif backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL):
                if self.Base is None:
                    self.rdm_automapper()

                # TODO: inserting data to sub tables to be implemented for mysql and pgsql

                if 'add' in entry and 'changetype' in entry:
                    attribute = entry['add'][0]
                    new_val = entry[attribute]
                    sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                    if sqlalchObj:
                        if isinstance(sqlalchObj.__table__.columns[attribute].type, self.json_dialects_instance):
                            cur_val = copy.deepcopy(getattr(sqlalchObj, attribute))
                            for val_ in new_val:
                                cur_val['v'].append(val_)
                            setattr(sqlalchObj, attribute, cur_val)
                        else:
                            setattr(sqlalchObj, attribute, new_val[0])
                        self.session.commit()
                    else:
                        base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                        continue

                elif 'replace' in entry and 'changetype' in entry:
                    attribute = entry['replace'][0]
                    new_val = self.get_rdbm_val(attribute, entry[attribute])
                    sqlalchObj = self.get_sqlalchObj_for_dn(dn)

                    if sqlalchObj:
                        setattr(sqlalchObj, attribute, new_val)
                        self.session.commit()
                    else:
                        base.logIt("Can't find current value for replacement of {}".format(str(entry)), True)
                        continue

                else:
                    vals = {}
                    dn_parsed = dnutils.parse_dn(dn)
                    rdn_name = dn_parsed[0][0]
                    objectClass = self.get_clean_objcet_class(entry)
                    if objectClass.lower() == 'organizationalunit':
                        continue

                    vals['doc_id'] = dn_parsed[0][1]
                    vals['dn'] = dn
                    vals['objectClass'] = objectClass

                    #entry.pop(rdn_name)
                    if 'objectClass' in entry:
                        entry.pop('objectClass')
                    elif 'objectclass' in entry:
                        entry.pop('objectclass')

                    table_name = objectClass

                    if self.dn_exists_rdbm(dn, table_name):
                        base.logIt("DN {} exists in {}, skipping".format(dn, Config.rdbm_type))
                        continue

                    for lkey in entry:
                        vals[lkey] = self.get_rdbm_val(lkey, entry[lkey])

                    sqlalchCls = self.Base.classes[table_name]

                    for col in sqlalchCls.__table__.columns:
                        if isinstance(col.type, self.json_dialects_instance) and not col.name in vals:
                            vals[col.name] = {'v': []}

                    sqlalchObj = sqlalchCls()

                    for v in vals:
                        setattr(sqlalchObj, v, vals[v])

                    base.logIt("Adding {}".format(sqlalchObj.doc_id))
                    self.session.add(sqlalchObj)
                    self.session.commit()

            elif backend_location == BackendTypes.SPANNER:
                if 'add' in entry and 'changetype' in entry:
                    table = self.get_spanner_table_for_dn(dn)
                    doc_id = self.get_doc_id_from_dn(dn)
                    change_attr = entry['add'][0]

                    if table:
                        if self.in_subtable(table, change_attr):
                            sub_table = '{}_{}'.format(table, change_attr)
                            for subval in entry[change_attr]:
                                typed_val = self.get_rdbm_val(change_attr, subval, rdbm_type='spanner')
                                dict_doc_id = self.get_sha_digest(typed_val)
                                self.spanner.insert_data(table=sub_table, columns=['doc_id', 'dict_doc_id', change_attr], values=[[doc_id, dict_doc_id, typed_val]])
                        else:
                            data = self.spanner.exec_sql('SELECT {} FROM {} WHERE doc_id="{}"'.format(change_attr, table, doc_id))
                            if data.get('rows'):
                                cur_data = []
                                if 'rows' in data and data['rows'] and data['rows'][0] and data['rows'][0][0]:
                                    cur_data = data['rows'][0][0]
                                for cur_val in entry[change_attr]:
                                    typed_val = self.get_rdbm_val(change_attr, cur_val, rdbm_type='spanner')
                                    cur_data.append(typed_val)
                                self.spanner.update_data(table=table, columns=['doc_id', change_attr], values=[[doc_id, cur_data]])

                elif 'replace' in entry and 'changetype' in entry:
                    table = self.get_spanner_table_for_dn(dn)
                    doc_id = self.get_doc_id_from_dn(dn)
                    replace_attr = entry['replace'][0]
                    typed_val = self.get_rdbm_val(replace_attr, entry[replace_attr], rdbm_type='spanner')

                    if self.in_subtable(table, replace_attr):
                        sub_table = '{}_{}'.format(table, replace_attr)
                        # TODO: how to replace ?
                        #for subval in typed_val:
                        #    self.spanner.update_data(table=sub_table, columns=['doc_id', replace_attr], values=[[doc_id, subval]])
                    else:
                        self.spanner.update_data(table=table, columns=['doc_id', replace_attr], values=[[doc_id, typed_val]])

                else:
                    vals = {}
                    dn_parsed = dnutils.parse_dn(dn)
                    rdn_name = dn_parsed[0][0]
                    objectClass = self.get_clean_objcet_class(entry)
                    if objectClass.lower() == 'organizationalunit':
                        continue

                    doc_id = dn_parsed[0][1]
                    vals['doc_id'] = doc_id
                    vals['dn'] = dn
                    vals['objectClass'] = objectClass

                    if 'objectClass' in entry:
                        entry.pop('objectClass')
                    elif 'objectclass' in entry:
                        entry.pop('objectclass')

                    table_name = objectClass

                    subtable_data = []
                    for lkey in entry:
                        spanner_vals = self.get_rdbm_val(lkey, entry[lkey], rdbm_type='spanner')
                        if not self.in_subtable(table_name, lkey):
                            vals[lkey] = spanner_vals
                        else:
                            sub_table = '{}_{}'.format(table_name, lkey)
                            sub_table_columns = ['doc_id', 'dict_doc_id', lkey]
                            sub_table_values = []
                            for subtableval in spanner_vals:
                                dict_doc_id = self.get_sha_digest(subtableval)
                                sub_table_values.append([doc_id, dict_doc_id, subtableval])
                            subtable_data.append((sub_table, sub_table_columns, sub_table_values))

                    columns = [ *vals.keys() ]
                    values = [ vals[lkey] for lkey in columns ]

                    self.spanner.insert_data(table=table_name, columns=columns, values=[values])

                    for sdata in subtable_data:
                        self.spanner.insert_data(table=sdata[0], columns=sdata[1], values=sdata[2])

            elif backend_location == BackendTypes.COUCHBASE:
                if len(entry) < 3:
                    continue

                key, document = ldif_utils.get_document_from_entry(dn, entry)
                cur_bucket = bucket if bucket else self.get_bucket_for_dn(dn)
                base.logIt("Adding document {} to Couchbase bucket {}".format(key, cur_bucket))

                n1ql_list = []

                if 'changetype' in document:
                    if 'replace' in document:
                        attribute = document['replace']
                        n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(document[attribute])))
                    elif 'add' in document:
                        attribute = document['add']
                        result = self.check_attribute_exists(key, attribute)
                        data = document[attribute]
                        if result:
                            if isinstance(data, list):
                                for d in data:
                                    n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(d)))
                            else:
                                n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(data)))
                        else:
                            if attribute in attribDataTypes.listAttributes and not isinstance(data, list):
                                data = [data]
                            n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(data)))
                else:
                    for k in document:
                        try:
                            kdata = json.loads(document[k])
                            if isinstance(kdata, dict):
                                document[k] = kdata
                        except Exception:
                            pass
                    n1ql_list.append('UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (cur_bucket, key, json.dumps(document)))

                for q in n1ql_list:
                    self.cbm.exec_query(q)
def search(self, search_base, search_filter='(objectClass=*)', search_scope=ldap3.LEVEL, fetchmany=False):
    base.logIt("Searching database for dn {} with filter {}".format(search_base, search_filter))
    backend_location = self.get_backend_location_for_dn(search_base)

    if backend_location == BackendTypes.LDAP:
        if self.ldap_conn.search(search_base=search_base, search_filter=search_filter, search_scope=search_scope, attributes=['*']):
            if not fetchmany:
                key, document = ldif_utils.get_document_from_entry(self.ldap_conn.response[0]['dn'], self.ldap_conn.response[0]['attributes'])
                return document

            documents = []
            for result in self.ldap_conn.response:
                key, document = ldif_utils.get_document_from_entry(result['dn'], result['attributes'])
                documents.append((key, document))
            return documents

    if backend_location in (BackendTypes.MYSQL, BackendTypes.PGSQL, BackendTypes.SPANNER):
        if backend_location != BackendTypes.SPANNER and self.Base is None:
            self.rdm_automapper()

        s_table = None
        where_clause = ''
        search_list = []

        if '&' in search_filter:
            re_match = re.match(r'\(&\((.*?)=(.*?)\)\((.*?)=(.*?)\)', search_filter)
            if re_match:
                re_list = re_match.groups()
                search_list.append((re_list[0], re_list[1]))
                search_list.append((re_list[2], re_list[3]))
        else:
            re_match = re.match(r'\((.*?)=(.*?)\)', search_filter)
            if re_match:
                re_list = re_match.groups()
                search_list.append((re_list[0], re_list[1]))

        for col, val in search_list:
            if col.lower() == 'objectclass':
                s_table = val
                break

        if not s_table:
            return

        if backend_location == BackendTypes.SPANNER:
            if fetchmany:
                retVal = []
            else:
                retVal = {}

            for col, val in search_list:
                if val == '*':
                    continue
                if col.lower() == 'objectclass':
                    s_table = val
                else:
                    val = val.replace('*', '%')
                    q_operator = 'LIKE' if '%' in val else '='
                    where_clause = 'AND {} {} "{}"'.format(col, q_operator, val)

            if not s_table:
                return retVal

            if search_scope == ldap3.BASE:
                dn_clause = 'dn = "{}"'.format(search_base)
            else:
                dn_clause = 'dn LIKE "%{}"'.format(search_base)

            sql_cmd = 'SELECT * FROM {} WHERE ({}) {}'.format(s_table, dn_clause, where_clause)

            data = self.spanner.exec_sql(sql_cmd)
            if not data.get('rows'):
                return retVal

            n = len(data['rows']) if fetchmany else 1

            for j in range(n):
                row = data['rows'][j]
                row_dict = {}
                for i, field in enumerate(data['fields']):
                    val = row[i]
                    if val:
                        if field['type'] == 'INT64':
                            val = int(val)
                    row_dict[field['name']] = val

                if fetchmany:
                    retVal.append(row_dict)
                else:
                    retVal = row_dict
                    break

            return retVal

        sqlalchemy_table = self.Base.classes[s_table]
        sqlalchemyQueryObject = self.session.query(sqlalchemy_table)

        for col, val in search_list:
            if val == '*':
                continue
            if col.lower() != 'objectclass':
                val = val.replace('*', '%')
                sqlalchemyCol = getattr(sqlalchemy_table, col)
                if '%' in val:
                    sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemyCol.like(val))
                else:
                    sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemyCol == val)

        if search_scope == ldap3.BASE:
            sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemy_table.dn == search_base)
        else:
            sqlalchemyQueryObject = sqlalchemyQueryObject.filter(sqlalchemy_table.dn.like('%' + search_base))

        if fetchmany:
            result = sqlalchemyQueryObject.all()
            return [ item.__dict__ for item in result ]

        result = sqlalchemyQueryObject.first()
        if result:
            return result.__dict__

    if backend_location == BackendTypes.COUCHBASE:
        key = ldif_utils.get_key_from(search_base)
        bucket = self.get_bucket_for_key(key)

        if search_scope == ldap3.BASE:
            n1ql = 'SELECT * FROM `{}` USE KEYS "{}"'.format(bucket, key)
        else:
            if '&' in search_filter:
                re_match = re.match(r'\(&\((.*?)\)\((.*?)\)\)', search_filter)
                if re_match:
                    re_list = re_match.groups()
                    dn_to_parse = re_list[0] if 'objectclass' in re_list[1].lower() else re_list[1]
            else:
                dn_to_parse = search_filter.strip('(').strip(')')

            parsed_dn = dnutils.parse_dn(dn_to_parse)
            attr = parsed_dn[0][0]
            val = parsed_dn[0][1]

            if '*' in val:
                search_clause = 'LIKE "{}"'.format(val.replace('*', '%'))
            else:
                search_clause = '="{}"'.format(val)

            n1ql = 'SELECT * FROM `{}` WHERE `{}` {}'.format(bucket, attr, search_clause)

        result = self.cbm.exec_query(n1ql)
        if result.ok:
            data = result.json()
            if data.get('results'):
                if fetchmany:
                    return [ item[bucket] for item in data['results'] ]
                return data['results'][0][bucket]
def import_ldif(self, ldif_files, bucket=None, force=None):
    for ldif_fn in ldif_files:
        parser = ldif_utils.myLdifParser(ldif_fn)
        parser.parse()

        for dn, entry in parser.entries:
            backend_location = force if force else self.get_backend_location_for_dn(dn)

            if backend_location == BackendTypes.LDAP:
                if not self.dn_exists(dn):
                    base.logIt("Adding LDAP dn:{} entry:{}".format(dn, dict(entry)))
                    self.ldap_conn.add(dn, attributes=entry)

            elif backend_location == BackendTypes.COUCHBASE:
                if len(entry) < 3:
                    continue

                key, document = ldif_utils.get_document_from_entry(dn, entry)
                cur_bucket = bucket if bucket else self.get_bucket_for_dn(dn)
                base.logIt("Adding document {} to Couchbase bucket {}".format(key, cur_bucket))

                n1ql_list = []

                if 'changetype' in document:
                    if 'replace' in document:
                        attribute = document['replace']
                        n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(document[attribute])))
                    elif 'add' in document:
                        attribute = document['add']
                        result = self.check_attribute_exists(key, attribute)
                        data = document[attribute]
                        if result:
                            if isinstance(data, list):
                                for d in data:
                                    n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(d)))
                            else:
                                n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=ARRAY_APPEND(`%s`, %s)' % (cur_bucket, key, attribute, attribute, json.dumps(data)))
                        else:
                            if attribute in attribDataTypes.listAttributes and not isinstance(data, list):
                                data = [data]
                            n1ql_list.append('UPDATE `%s` USE KEYS "%s" SET `%s`=%s' % (cur_bucket, key, attribute, json.dumps(data)))
                else:
                    n1ql_list.append('UPSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (cur_bucket, key, json.dumps(document)))

                for q in n1ql_list:
                    self.cbm.exec_query(q)
def do_installation():
    jansProgress.before_start()
    jansProgress.start()

    try:
        jettyInstaller.calculate_selected_aplications_memory()

        if not Config.installed_instance:
            jansInstaller.configureSystem()
            jansInstaller.make_salt()
            oxauthInstaller.make_salt()

            if not base.snap:
                jreInstaller.start_installation()
                jettyInstaller.start_installation()
                jythonInstaller.start_installation()

            jansInstaller.copy_scripts()
            jansInstaller.encode_passwords()

            Config.ldapCertFn = Config.opendj_cert_fn
            Config.ldapTrustStoreFn = Config.opendj_p12_fn
            Config.encoded_ldapTrustStorePass = Config.encoded_opendj_p12_pass

            jansInstaller.prepare_base64_extension_scripts()
            jansInstaller.render_templates()
            jansInstaller.render_configuration_template()

            if not base.snap:
                jansInstaller.update_hostname()
                jansInstaller.set_ulimits()

            jansInstaller.copy_output()
            jansInstaller.setup_init_scripts()

        # Installing jans components
        if Config.wrends_install:
            openDjInstaller.start_installation()

        if Config.cb_install:
            couchbaseInstaller.start_installation()

        if (Config.installed_instance and 'installHttpd' in Config.addPostSetupService) or (not Config.installed_instance and Config.installHttpd):
            httpdinstaller.configure()

        if (Config.installed_instance and 'installOxAuth' in Config.addPostSetupService) or (not Config.installed_instance and Config.installOxAuth):
            oxauthInstaller.start_installation()

        if (Config.installed_instance and 'installFido2' in Config.addPostSetupService) or (not Config.installed_instance and Config.installFido2):
            fidoInstaller.start_installation()

        if (Config.installed_instance and 'installScimServer' in Config.addPostSetupService) or (not Config.installed_instance and Config.installScimServer):
            scimInstaller.start_installation()

        if (Config.installed_instance and configApiInstaller.install_var in Config.addPostSetupService) or (not Config.installed_instance and Config.get(configApiInstaller.install_var)):
            configApiInstaller.start_installation()

        #if (Config.installed_instance and 'installOxd' in Config.addPostSetupService) or (not Config.installed_instance and Config.installOxd):
        #    oxdInstaller.start_installation()

        if argsp.t:
            testDataLoader.load_test_data()

        jansProgress.progress(PostSetup.service_name, "Saving properties")
        propertiesUtils.save_properties()
        time.sleep(2)

        for service in jansProgress.services:
            if service['app_type'] == static.AppType.SERVICE:
                jansProgress.progress(PostSetup.service_name, "Starting {}".format(service['name'].title()))
                time.sleep(2)
                service['object'].stop()
                service['object'].start()

        jansInstaller.post_install_tasks()

        jansProgress.progress(static.COMPLETED)

        print()
        for m in Config.post_messages:
            print(m)

    except:
        base.logIt("FATAL", True, True)
def do_installation():
    if not GSA:
        gluuProgress.before_start()
        gluuProgress.start()

    try:
        jettyInstaller.calculate_selected_aplications_memory()

        if not Config.installed_instance:
            gluuInstaller.configureSystem()
            gluuInstaller.make_salt()
            oxauthInstaller.make_salt()

            if not base.snap:
                jreInstaller.start_installation()
                jettyInstaller.start_installation()
                jythonInstaller.start_installation()
                nodeInstaller.start_installation()

            gluuInstaller.copy_scripts()
            gluuInstaller.encode_passwords()

            oxtrustInstaller.generate_api_configuration()

            Config.ldapCertFn = Config.opendj_cert_fn
            Config.ldapTrustStoreFn = Config.opendj_p12_fn
            Config.encoded_ldapTrustStorePass = Config.encoded_opendj_p12_pass
            Config.oxTrustConfigGeneration = 'true' if Config.installSaml else 'false'

            gluuInstaller.prepare_base64_extension_scripts()
            gluuInstaller.render_templates()
            gluuInstaller.render_configuration_template()

            if not base.snap:
                gluuInstaller.update_hostname()
                gluuInstaller.set_ulimits()

            gluuInstaller.copy_output()
            gluuInstaller.setup_init_scripts()

        # Installing gluu components
        if Config.wrends_install:
            openDjInstaller.start_installation()

        if Config.cb_install:
            couchbaseInstaller.start_installation()

        if (Config.installed_instance and 'installHttpd' in Config.addPostSetupService) or (not Config.installed_instance and Config.installHttpd):
            httpdinstaller.configure()

        if (Config.installed_instance and 'installOxAuth' in Config.addPostSetupService) or (not Config.installed_instance and Config.installOxAuth):
            oxauthInstaller.start_installation()

        if (Config.installed_instance and 'installOxAuthRP' in Config.addPostSetupService) or (not Config.installed_instance and Config.installOxAuthRP):
            oxauthInstaller.install_oxauth_rp()

        if (Config.installed_instance and 'installOxTrust' in Config.addPostSetupService) or (not Config.installed_instance and Config.installOxTrust):
            oxtrustInstaller.start_installation()

        if (Config.installed_instance and 'installFido2' in Config.addPostSetupService) or (not Config.installed_instance and Config.installFido2):
            fidoInstaller.start_installation()

        if (Config.installed_instance and 'installScimServer' in Config.addPostSetupService) or (not Config.installed_instance and Config.installScimServer):
            scimInstaller.start_installation()

        if (Config.installed_instance and 'installSaml' in Config.addPostSetupService) or (not Config.installed_instance and Config.installSaml):
            samlInstaller.start_installation()

        if (Config.installed_instance and 'installOxd' in Config.addPostSetupService) or (not Config.installed_instance and Config.installOxd):
            oxdInstaller.start_installation()

        if (Config.installed_instance and 'installCasa' in Config.addPostSetupService) or (not Config.installed_instance and Config.installCasa):
            casaInstaller.start_installation()

        if (Config.installed_instance and 'installPassport' in Config.addPostSetupService) or (not Config.installed_instance and Config.installPassport):
            passportInstaller.start_installation()

        if not Config.installed_instance:
            # this will install only base
            radiusInstaller.start_installation()

        if (Config.installed_instance and 'installGluuRadius' in Config.addPostSetupService) or (not Config.installed_instance and Config.installGluuRadius):
            radiusInstaller.install_gluu_radius()

        gluuProgress.progress(PostSetup.service_name, "Saving properties")
        propertiesUtils.save_properties()
        time.sleep(2)

        for service in gluuProgress.services:
            if service['app_type'] == static.AppType.SERVICE:
                gluuProgress.progress(PostSetup.service_name, "Starting {}".format(service['name'].title()))
                time.sleep(2)
                service['object'].stop()
                service['object'].start()
                if service['name'] == 'oxauth' and Config.get('installOxAuthRP'):
                    gluuProgress.progress(PostSetup.service_name, "Starting Oxauth-rp")
                    service['object'].start('oxauth-rp')

        gluuInstaller.post_install_tasks()

        gluuProgress.progress(static.COMPLETED)

        if not GSA:
            print()
            for m in Config.post_messages:
                print(m)

    except:
        if GSA:
            gluuProgress.progress(static.ERROR, str(traceback.format_exc()))
        base.logIt("FATAL", True, True)