def ruleset_addmeta(p_engine, p_username, params, inputfile, fromconnector, bulk):
    """
    Add metadata (table or file entries) to a ruleset on the Masking engine(s).
    param1: p_engine: engine name from configuration
    param2: p_username: username used to select engine credentials
    param3: params: set of required parameters to add meta
            (rulesetname, envname, metaname, fetchfilter)
    param4: inputfile: file with table/file definition
    param5: fromconnector: fetch metadata directly from the connector
    param6: bulk: use bulk API when adding metadata
    return 0 if added, non 0 for error
    """
    ret = 0
    rulesetname = params["rulesetname"]
    envname = params["envname"]
    enginelist = get_list_of_engines(p_engine, p_username)

    # exactly one metadata source (metaname / inputfile / fromconnector)
    # must be provided
    if (params["metaname"] is None) and (inputfile is None) and \
       (fromconnector is None):
        print_error("Option metaname, inputfile or fromconnector is required")
        return 1

    if ((params["metaname"]) and inputfile) or \
       ((params["metaname"]) and fromconnector) or \
       (inputfile and fromconnector):
        print_error(
            "Option metaname, fromconnector and inputfile are mutally exclusive"
        )
        return 1

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # can't log into this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        #rulelist.LoadRulesets()
        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
            if (params["metaname"]):
                ret = ret + ruleobj.addmeta(params)
            elif inputfile:
                ret = ret + ruleobj.addmetafromfile(inputfile, bulk)
            elif fromconnector:
                ret = ret + ruleobj.addmetafromfetch(params["fetchfilter"], bulk)
            else:
                print_error("Source for add meta is not specified")
        else:
            # ruleset not found on this engine
            ret = ret + 1

    return ret
def environment_delete(p_engine, envname):
    """
    Delete an environment from the Masking engine(s).
    param1: p_engine: engine name from configuration
    param2: envname: environment name
    return 0 if deleted, non 0 for error
    """
    engines = get_list_of_engines(p_engine)
    if engines is None:
        return 1

    errors = 0
    for engine_def in engines:
        engine_obj = DxMaskingEngine(engine_def[0], engine_def[1],
                                     engine_def[2], engine_def[3])
        if engine_obj.get_session():
            # can't log into this engine - try the next one
            continue
        environments = DxEnvironmentList()
        environments.LoadEnvironments()
        env_ref = environments.get_environmentId_by_name(envname)
        if environments.delete(env_ref):
            errors += 1
    return errors
def environment_add(p_engine, envname, appname, purpose):
    """
    Create an environment on the Masking engine(s).
    param1: p_engine: engine name from configuration
    param2: envname: environment name
    param3: appname: application name
    param4: purpose: environment purpose ( MASK )
    return 0 if added, non 0 for error
    """
    engines = get_list_of_engines(p_engine)
    if engines is None:
        return 1

    errors = 0
    for engine_def in engines:
        engine_obj = DxMaskingEngine(engine_def[0], engine_def[1],
                                     engine_def[2], engine_def[3])
        if engine_obj.get_session():
            # unreachable engine - skip
            continue
        environments = DxEnvironmentList()
        new_env = DxEnvironment(engine_obj)
        # required properties for the new environment
        new_env.environment_name = envname
        new_env.application = appname
        new_env.purpose = purpose
        if environments.add(new_env):
            errors += 1
    return errors
def profilejob_add(p_engine, params):
    """
    Add profile job to Masking engine
    param1: p_engine: engine name from configuration
    param2: params: job parameters (envname, jobname, rulesetname,
            profilename plus optional masking parameters)
    return 0 if added, non 0 for error
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    logger = logging.getLogger()
    envname = params['envname']
    jobname = params['jobname']
    rulesetname = params['rulesetname']
    profilename = params['profilename']

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # can't log into this engine - skip it
            continue
        joblist = DxProfileJobsList()
        envlist = DxEnvironmentList()
        rulesetlist = DxRulesetList()
        profilesetlist = DxProfilesList()
        profileref = profilesetlist.get_profileSetId_by_name(profilename)
        envlist.LoadEnvironments()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        rulesetlist.LoadRulesets(envname)
        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)
        job = DxProfileJob(engine_obj, None)
        job.ruleset_id = rulesetref
        job.job_name = jobname
        job.profile_set_id = profileref
        # map Y/N command line flags to booleans; other values pass through
        for p in masking_params_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(job, p, value)
        if joblist.add(job):
            ret = ret + 1
    return ret
def job_selector(**kwargs):
    """
    Select unique job from Masking engine and run function on it
    kwargs:
        p_engine: engine name from configuration
        jobname: job name
        envname: environment name
        function_to_call: name of function to call on job
        joblist_class: name of the job list class to instantiate
                       (resolved via globals())
        lock: optional lock protecting the module-level error counter
        p_username: username used to select engine credentials
    return 0 if OK, non 0 for error
    """
    p_engine = kwargs.get('p_engine')
    jobname = kwargs.get('jobname')
    envname = kwargs.get('envname')
    function_to_call = kwargs.get('function_to_call')
    joblist_class = kwargs.get('joblist_class')
    lock = kwargs.get('lock')
    p_username = kwargs.get('p_username')

    ret = 0

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # can't log into this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        # resolve the job list class by name (DxJobsList / DxProfileJobsList)
        joblist = globals()[joblist_class]()
        joblist.LoadJobs(envname)
        jobref = joblist.get_jobId_by_name(jobname)
        if jobref:
            dynfunc = globals()[function_to_call]
            ret = ret + dynfunc(
                jobref=jobref,
                engine_obj=engine_obj,
                joblist=joblist,
                **kwargs)
        else:
            # job not found - bump the module-level counter under lock
            # (counter is shared when jobs run in parallel)
            if lock:
                lock.acquire()
            dxm.lib.DxJobs.DxJobCounter.ret = \
                dxm.lib.DxJobs.DxJobCounter.ret + 1
            if lock:
                lock.release()
            continue
    return ret
def _do_start_report_error(joblist_class, lock):
    """ Bump the proper module-level failure counter (masking vs profile)
        under lock; helper for do_start. The lock may be None when
        do_start is invoked without parallelism. """
    if lock:
        lock.acquire()
    try:
        if joblist_class == "DxJobsList":
            dxm.lib.DxJobs.DxJobCounter.ret = \
                dxm.lib.DxJobs.DxJobCounter.ret + 1
        else:
            dxm.lib.DxJobs.DxJobCounter.profileret = \
                dxm.lib.DxJobs.DxJobCounter.profileret + 1
    finally:
        if lock:
            lock.release()


def do_start(**kwargs):
    """
    Start job
    kwargs:
        jobref: reference of job to start
        joblist: job list object holding the job
        tgt_connector: target connector name (multi-tenant jobs only)
        tgt_connector_env: environment of the target connector
        nowait: don't wait for job to finish
        posno: position number (progress display)
        lock: lock protecting module-level error counters, may be None
        monitor: enable monitor
        joblist_class: name of the job list class (DxJobsList / other)
    return result of job start, or 1 on error
    """
    jobref = kwargs.get('jobref')
    joblist = kwargs.get('joblist')
    tgt_connector = kwargs.get('tgt_connector')
    tgt_connector_env = kwargs.get('tgt_connector_env')
    nowait = kwargs.get('nowait')
    posno = kwargs.get('posno')
    lock = kwargs.get('lock')
    monitor = kwargs.get('monitor')
    joblist_class = kwargs.get('joblist_class')
    jobobj = joblist.get_by_ref(jobref)

    targetconnector = None

    if jobobj.multi_tenant:
        # a multi-tenant job needs a target connector to run against
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        if tgt_connector is None:
            print_error("Target connector is required for multitenant job")
            _do_start_report_error(joblist_class, lock)
            return 1
        connectorlist = DxConnectorsList(tgt_connector_env)
        #connectorlist.LoadConnectors()
        targetconnector = connectorlist.get_connectorId_by_name(
            tgt_connector)
        if targetconnector:
            # drop the first character of the connector id
            # (presumably a connector-type prefix - confirm)
            targetconnector = targetconnector[1:]
        else:
            print_error("Target connector for multitenant job not found")
            _do_start_report_error(joblist_class, lock)
            return 1

    #staring job
    jobobj.monitor = monitor
    return jobobj.start(targetconnector, None, nowait, posno, lock)
def environment_list(p_engine, p_username, format, envname):
    """
    Print list of environments
    param1: p_engine: engine name from configuration
    param2: p_username: username used to select engine credentials
    param3: format: output format
    param4: envname: environment name to list, all if None
    return 0 if environment found
    """
    failures = 0
    engines = get_list_of_engines(p_engine, p_username)
    if engines is None:
        return 1

    data = DataFormatter()
    data.create_header([
        ("Engine name", 30),
        ("Environment name", 30),
        ("Application name", 30)
    ])
    data.format_type = format

    for engine_def in engines:
        engine_obj = DxMaskingEngine(engine_def)
        if engine_obj.get_session():
            # unreachable engine - try the next one
            continue
        environments = DxEnvironmentList()
        if envname is None:
            refs = environments.get_allref()
        else:
            single_ref = environments.get_environmentId_by_name(envname)
            if single_ref is None:
                failures += 1
                continue
            refs = [single_ref]
        for ref in refs:
            env_obj = environments.get_by_ref(ref)
            data.data_insert(
                engine_def[0],
                env_obj.environment_name,
                env_obj.application_name
            )

    print("")
    print(data.data_output(False))
    print("")
    return failures
def ruleset_add(p_engine, p_username, rulesetname, connectorname, envname):
    """
    Create a ruleset on the Masking engine(s).
    param1: p_engine: engine name from configuration
    param2: p_username: username used to select engine credentials
    param3: rulesetname: ruleset name
    param4: connectorname: connector name
    param5: envname: environment name
    return 0 if added, non 0 for error
    """
    log = logging.getLogger()
    engines = get_list_of_engines(p_engine, p_username)
    if engines is None:
        return 1

    errors = 0
    for engine_def in engines:
        engine_obj = DxMaskingEngine(engine_def)
        if engine_obj.get_session():
            # unreachable engine - skip
            continue
        environments = DxEnvironmentList()
        environments.LoadEnvironments()
        rulesets = DxRulesetList()
        connectors = DxConnectorsList(envname)
        log.debug("Connector is %s" % connectorname)
        conn_ref = connectors.get_connectorId_by_name(connectorname)
        conn_obj = connectors.get_by_ref(conn_ref)
        if not conn_obj:
            errors += 1
            continue
        # database and file connectors need different ruleset types
        if conn_obj.is_database:
            new_ruleset = DxDatabaseRuleset(engine_obj)
            new_ruleset.create_database_ruleset(
                ruleset_name=rulesetname,
                database_connector_id=conn_obj.connectorId,
                refresh_drops_tables=None)
        else:
            new_ruleset = DxFileRuleset(engine_obj)
            new_ruleset.create_file_ruleset(
                ruleset_name=rulesetname,
                file_connector_id=conn_obj.connectorId)
        if rulesets.add(new_ruleset):
            errors += 1
    return errors
def ruleset_addmeta(p_engine, params, inputfile):
    """
    Add metadata to Masking engine
    param1: p_engine: engine name from configuration
    param2: params: set of required parameters to add meta
    param3: inputfile: file with table/file definition
    return 0 if added, non 0 for error

    NOTE(review): this module contains another ruleset_addmeta with a
    different signature; the definition that appears later in the file
    shadows the earlier one - confirm which variant should survive.
    """
    ret = 0
    rulesetname = params["rulesetname"]
    envname = params["envname"]
    enginelist = get_list_of_engines(p_engine)

    # exactly one of metaname / inputfile must be provided
    if (params["metaname"] is None) and (inputfile is None):
        print_error("Option metaname or inputfile is required")
        return 1

    if (params["metaname"]) and (inputfile):
        print_error("Option metaname and inputfile are mutally exclusive")
        return 1

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            # can't log into this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
            if (params["metaname"]):
                ret = ret + ruleobj.addmeta(params)
            else:
                ret = ret + ruleobj.addmetafromfile(inputfile)
        else:
            # ruleset not found on this engine
            ret = ret + 1

    return ret
def sync_import(p_engine, p_username, envname, inputfile, inputpath, force):
    """
    Import sync objects (e.g. algorithms) from file(s) into engine(s).
    param1: p_engine: engine name from configuration
    param2: p_username: username used to select engine credentials
    param3: envname: target environment name, may be None
    param4: inputfile: already-opened input file, used when inputpath
            is not given
    param5: inputpath: directory - every regular file inside is imported
    param6: force: overwrite object
    return 0 if OK, non 0 for error
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine, p_username)

    if inputfile is None and inputpath is None:
        print_error("Inputfile or inputpath parameter is required")
        return 1

    list_of_opened_files = []
    # BUG FIX: files opened here were never closed (handle leak);
    # track the handles we own so we can close them in finally below
    files_we_opened = []

    if inputpath:
        for f in os.listdir(inputpath):
            fullpath = os.path.join(inputpath, f)
            if os.path.isfile(fullpath):
                fh = open(fullpath, "rb")
                list_of_opened_files.append(fh)
                files_we_opened.append(fh)
    else:
        if inputfile:
            # caller owns this handle - do not close it here
            list_of_opened_files = [inputfile]

    try:
        if enginelist is None:
            return 1

        for engine_tuple in enginelist:
            engine_obj = DxMaskingEngine(engine_tuple)
            if engine_obj.get_session():
                # unreachable engine - skip
                continue
            envlist = DxEnvironmentList()
            if envname:
                environment_id = envlist.get_environmentId_by_name(envname)
            else:
                environment_id = None
            for i in list_of_opened_files:
                syncobj = DxSync(engine_obj)
                ret = ret + syncobj.importsync(i, environment_id, force)
    finally:
        for fh in files_we_opened:
            fh.close()

    return ret
def LoadJobs(self, environment_name=None):
    """
    Load the list of masking jobs (optionally limited to one environment)
    into the internal job dictionary, attaching each job's last execution.
    :param environment_name: environment name to limit the load, all if None
    Return None if OK, 1 on error
    """
    try:
        api_instance = MaskingJobApi(self.__engine.api_client)
        execapi = ExecutionApi(self.__engine.api_client)
        # fetch all executions first so each job can be paired
        # with its execution record below
        execList = paginator(
            execapi,
            "get_all_executions")

        if execList.response_list:
            for e in execList.response_list:
                # NOTE: if a job has several executions, later entries
                # overwrite earlier ones - presumably the API returns
                # them in order so the last one wins; confirm
                self.__executionList[e.job_id] = e

        if environment_name:
            environment_id = DxEnvironmentList.get_environmentId_by_name(
                environment_name)
            if environment_id:
                jobs = paginator(
                    api_instance,
                    "get_all_masking_jobs",
                    environment_id=environment_id,
                    _request_timeout=self.__engine.get_timeout())
            else:
                # environment name not found
                return 1
        else:
            jobs = paginator(
                api_instance,
                "get_all_masking_jobs",
                _request_timeout=self.__engine.get_timeout())

        if jobs.response_list:
            for c in jobs.response_list:
                if c.masking_job_id in self.__executionList:
                    lastExec = self.__executionList[c.masking_job_id]
                else:
                    lastExec = None
                job = DxJob(self.__engine, lastExec)
                job.from_job(c)
                self.__jobsList[c.masking_job_id] = job
        else:
            if environment_name is None:
                print_error("No jobs found")
                self.__logger.error("No jobs found")

        self.__logger.debug("All jobs loaded")

    except ApiException as e:
        print_error("Can't load job list %s" % e.body)
        return 1
def connector_selector(p_engine, connectorname, envname, function_to_call,
                       format='fixed'):
    """
    Select unique connector from Masking engine and run function on it
    param1: p_engine: engine name from configuration
    param2: connectorname: connector name
    param3: envname: environment name
    param4: function_to_call: name of function to call on connector
    param5: format: format of output, set to fixed
    return 0 if added, non 0 for error
    """
    engines = get_list_of_engines(p_engine)
    if engines is None:
        return 1

    errors = 0
    for engine_def in engines:
        engine_obj = DxMaskingEngine(engine_def)
        if engine_obj.get_session():
            # unreachable engine - try the next one
            continue
        environments = DxEnvironmentList()
        environments.LoadEnvironments()
        connectors = DxConnectorsList()
        connectors.LoadConnectors(envname)
        conn_ref = connectors.get_connectorId_by_name(connectorname)
        if not conn_ref:
            errors += 1
            continue
        # resolve the worker function by name and dispatch to it
        handler = globals()[function_to_call]
        errors += handler(connref=conn_ref,
                          engine_obj=engine_obj,
                          connlist=connectors,
                          format=format)
    return errors
def __init__(self, environment_name=None):
    """
    Constructor
    :param environment_name: optional environment name used to scope
                             the connector load
    """
    # NOTE(review): this binds the DxMaskingEngine *class*, not an
    # instance - presumably engine state is exposed at class level;
    # confirm this is intentional
    self.__engine = DxMaskingEngine
    self.__logger = logging.getLogger()
    self.__logger.debug("creating DxConnectorsList object")
    # ensure the environment list is initialized before loading connectors
    DxEnvironmentList()
    self.LoadConnectors(environment_name)
def ruleset_deletemeta(p_engine, rulesetname, metaname, envname):
    """
    Delete meta (file, table) from ruleset to Masking engine
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: metaname: metaname to delete
    param4: envname: environment name
    return 0 if added, non 0 for error
    """
    engines = get_list_of_engines(p_engine)
    if engines is None:
        return 1

    errors = 0
    for engine_def in engines:
        engine_obj = DxMaskingEngine(engine_def[0], engine_def[1],
                                     engine_def[2], engine_def[3])
        if engine_obj.get_session():
            # unreachable engine - skip
            continue
        environments = DxEnvironmentList()
        environments.LoadEnvironments()
        rulesets = DxRulesetList()
        rulesets.LoadRulesets(envname)
        rule_ref = rulesets.get_rulesetId_by_name(rulesetname)
        metadata = DxMetaList()
        metadata.LoadMeta(ruleset_id=rule_ref)
        meta_ref = metadata.get_MetadataId_by_name(metaname)
        if not meta_ref:
            errors += 1
            continue
        if metadata.delete(meta_ref):
            errors += 1
    return errors
def user_update(p_engine, username, firstname, lastname, email, password,
                user_type, user_environments, user_role):
    """
    Update user in Engine
    param1: p_engine: engine name from configuration
    param2: username: user name to update
    param3: firstname: user first name to set
    param4: lastname: user last name to set
    param5: email: user email to set
    param6: password: user password to set
    param7: user_type: user type (admin / nonadmin)
    param8: user_environments: list of comma separated environments
    param9: user_role: user role name
    return 0 if user updated
    """
    ret = 0
    update = 0
    logger = logging.getLogger()
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # unreachable engine - skip
            continue
        userlist = DxUserList()
        userref = userlist.get_userId_by_name(username)
        if userref is None:
            print_error("User %s not found" % username)
            logger.debug("User %s not found" % username)
            ret = ret + 1
            continue
        userobj = userlist.get_by_ref(userref)

        if user_type is not None:
            update = 1
            if user_type == 'nonadmin':
                # a non-admin needs a role and (optionally) a list of
                # environments it is restricted to
                if user_role is None:
                    print_error("User role is required for non-admin user")
                    return 1
                rolelist = DxRoleList()
                roleref = rolelist.get_roleId_by_name(user_role)
                if roleref is None:
                    print_error("Role name %s not found" % user_role)
                    logger.debug("Role name %s not found" % user_role)
                    ret = ret + 1
                    continue
                envreflist = []
                if user_environments is not None:
                    envlist = DxEnvironmentList()
                    envnamelist = user_environments.split(',')
                    for envname in envnamelist:
                        envref = envlist.get_environmentId_by_name(envname)
                        if envref is None:
                            ret = ret + 1
                            return 1
                        else:
                            envreflist.append(envref)
                userobj.is_admin = False
                nap = NonAdminProperties()
                nap.role_id = roleref
                nap.environment_ids = envreflist
                userobj.non_admin_properties = nap
            else:
                userobj.is_admin = True
                userobj.delete_nap()
            # BUG FIX: removed Python 2 "print userobj" debug leftover
            # (a SyntaxError under Python 3)
            logger.debug("updated user object %s" % userobj)

        if firstname is not None:
            update = 1
            userobj.first_name = firstname

        if lastname is not None:
            update = 1
            userobj.last_name = lastname

        if email is not None:
            update = 1
            userobj.email = email

        if password is not None:
            update = 1
            try:
                userobj.password = password
            except ValueError as e:
                # BUG FIX: was Python 2 "print str(e)"; route through
                # the module's error reporter instead
                print_error(str(e))
                ret = ret + 1
                return ret

        if update == 1:
            ret = ret + userobj.update()
        else:
            print_error("No values set for update.")
            ret = ret + 1

    return ret
def jobs_report_worker(p_engine, p_username, jobname, envname, p_format, last,
                       startdate, enddate, details, jobtype='masking'):
    """
    Print report of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: username used to select engine credentials
    param3: jobname: job name to list
    param4: envname: environemnt name to list jobs from
    param5: p_format: output format
    param6: last: display last job only
    param7: startdate: filter by start date
    param8: enddate: filter by end date
    param9: details: per-component detail rows instead of job summary
    param10: jobtype: 'masking' or profile
    return 0 if environment found
    """
    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()

    # three different report layouts: masking detail, masking summary,
    # and profile job report
    if jobtype == 'masking':
        if details:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("ExecId", 6),
                ("Meta name", 12),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
        else:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("Job Id", 6),
                ("Min Memory", 10),
                ("Max Memory", 10),
                ("Streams", 7),
                ("On The Fly", 10),
                ("Ruleset Type", 12),
                ("ExecId", 6),
                ("Total Rows", 10),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
    else:
        data_header = [
            ("Engine name", 30),
            ("Environment name", 30),
            ("Job name", 30),
            ("Ruleset Type", 12),
            ("ExecId", 6),
            ("Started", 20),
            ("Completed", 20),
            ("Status", 20),
            ("Runtime", 20)
        ]

    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # unreachable engine - skip
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)

        if jobtype == 'masking':
            joblist = DxJobsList()
        else:
            joblist = DxProfileJobsList()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        joblist.LoadJobs(envname)
        #rulesetlist.LoadRulesets(envname)
        #connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)
            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API
            # is not showing all types of connectors
            if rulesetobj is not None:
                ruleset_type = rulesetobj.type
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"
                ruleset_type = "N/A"

            if last:
                lastonly = True
            else:
                lastonly = False

            # pick which executions to report: only the last one,
            # a date-filtered subset, or all of them
            if lastonly:
                execlist = [ jobobj.lastExec ]
            else:
                if startdate or enddate:
                    execlist = jobobj.filter_executions(startdate, enddate)
                else:
                    execlist = jobobj.execList

            if execlist:
                for jobexec in execlist:
                    if details == False:
                        if jobtype == 'masking':
                            # summary row for a masking job execution
                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                rowsmasked = jobexec.rows_masked
                                rowstotal = jobexec.rows_total
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                   (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'
                                rowsmasked = 'N/A'
                                rowstotal = 'N/A'

                            data.data_insert(
                                engine_tuple[0],
                                envobjname,
                                jobobj.job_name,
                                jobobj.masking_job_id,
                                jobobj.min_memory,
                                jobobj.max_memory,
                                jobobj.num_input_streams,
                                jobobj.on_the_fly_masking,
                                ruleset_type,
                                execid,
                                rowstotal,
                                rowsmasked,
                                starttime,
                                endtime,
                                status,
                                runtime
                            )
                        else:
                            # summary row for a profile job execution
                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                   (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'

                            data.data_insert(
                                engine_tuple[0],
                                envobjname,
                                jobobj.job_name,
                                ruleset_type,
                                execid,
                                starttime,
                                endtime,
                                status,
                                runtime
                            )
                    else:
                        # details here - one row per execution component
                        if jobexec is not None:
                            execid = jobexec.execution_id
                            complist = jobobj.list_execution_component(execid)
                            if complist is not None:
                                for comp in complist:
                                    status = comp.status
                                    rowsmasked = comp.rows_masked
                                    metaname = comp.component_name
                                    if comp.start_time is not None:
                                        starttime = comp.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                    else:
                                        starttime = 'N/A'
                                    if (comp.end_time is not None) and \
                                       (comp.start_time is not None):
                                        endtime = comp.end_time \
                                            .strftime("%Y-%m-%d %H:%M:%S")
                                        runtimetemp = comp.end_time \
                                            - comp.start_time
                                        runtime = str(runtimetemp)
                                    else:
                                        endtime = 'N/A'
                                        runtime = 'N/A'

                                    data.data_insert(
                                        engine_tuple[0],
                                        envobjname,
                                        jobobj.job_name,
                                        execid,
                                        metaname,
                                        rowsmasked,
                                        starttime,
                                        endtime,
                                        status,
                                        runtime
                                    )
                            else:
                                print("setting 1")
                                ret = 1
                        else:
                            # no executions
                            ret = 1
            else:
                # no executions
                ret = 1

    print("")
    print (data.data_output(False))
    print("")
    return ret
def user_list(p_engine, format, username):
    """
    Print list of users
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: username: user name to list, all if None
    return 0 if role found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [("Engine name", 30),
                   ("User name", 30),
                   ("First name", 30),
                   ("Last name", 30),
                   ("E-mail", 30),
                   ("Auth type", 10),
                   ("Principal", 30),
                   ("Role name", 30),
                   ("Locked", 6),
                   ("Environment list", 30)]
    data.create_header(data_header)
    data.format_type = format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # unreachable engine - skip
            continue
        userlist = DxUserList()
        rolelist = DxRoleList()
        envlist = DxEnvironmentList()

        # check if ldap is configured; if so users are reported as
        # LDAP principals of the configured MSAD domain
        appsettingslist = DxAppSettingList()
        ldapobj = appsettingslist.get_appSetting_by_group_and_name(
            'ldap', 'Enable')
        if ldapobj.setting_value == 'true':
            msadobj = appsettingslist.get_appSetting_by_group_and_name(
                'ldap', 'MsadDomain')
        else:
            msadobj = None

        if username is None:
            users = userlist.get_allref()
        else:
            user = userlist.get_userId_by_name(username)
            if user is None:
                ret = ret + 1
                continue
            users = [user]

        for userref in users:
            userobj = userlist.get_by_ref(userref)
            # resolve role name; only non-admin users carry a role id
            if not userobj.is_admin:
                roleobj = rolelist.get_by_ref(
                    userobj.non_admin_properties.role_id)
                if roleobj is not None:
                    rolename = roleobj.role_name
                else:
                    rolename = 'Not Found'
            else:
                rolename = 'Administrator'

            # build the semicolon separated list of environment names
            # a non-admin user is restricted to
            if userobj.non_admin_properties is not None:
                if userobj.non_admin_properties.environment_ids:
                    envs = ';'.join([
                        envlist.get_by_ref(x).environment_name
                        for x in userobj.non_admin_properties.environment_ids
                    ])
                else:
                    envs = ''
            else:
                envs = ''

            if userobj.is_locked:
                locked = 'Locked'
            else:
                locked = 'Open'

            if msadobj is None:
                authtype = 'NATIVE'
                principal = ''
            else:
                authtype = 'LDAP'
                principal = '{}@{}'.format(userobj.user_name,
                                           msadobj.setting_value)

            data.data_insert(engine_tuple[0],
                             userobj.user_name,
                             userobj.first_name,
                             userobj.last_name,
                             userobj.email,
                             authtype,
                             principal,
                             rolename,
                             locked,
                             envs)

    print("")
    print(data.data_output(False))
    print("")
    return ret
def LoadRulesets_worker(self, environment_name, env_id):
    """
    Load database and file rule sets into the internal dictionary,
    optionally limited to one environment.
    :param environment_name: environment name to limit load, may be None
    :param env_id: environment id to limit load, used when name is None
    Return None if OK, 1 on error
    """
    self.__rulesetList.clear()
    # ensure connectors are loaded for the same environment scope
    DxConnectorsList(environment_name)
    self.__api = DatabaseRulesetApi
    self.__fileapi = FileRulesetApi
    self.__apiexc = ApiException

    try:
        api_instance = self.__api(self.__engine.api_client)

        # resolve the environment scope: by name, by id, or unscoped
        if environment_name:
            environment_id = DxEnvironmentList.get_environmentId_by_name(
                environment_name)
            if environment_id:
                database_rulesets = paginator(
                    api_instance,
                    "get_all_database_rulesets",
                    environment_id=environment_id,
                    _request_timeout=self.__engine.get_timeout())
            else:
                # environment name not found
                return 1
        else:
            if env_id:
                environment_id = env_id
                database_rulesets = paginator(
                    api_instance,
                    "get_all_database_rulesets",
                    environment_id=environment_id,
                    _request_timeout=self.__engine.get_timeout())
            else:
                environment_id = None
                database_rulesets = paginator(
                    api_instance,
                    "get_all_database_rulesets")

        if database_rulesets.response_list:
            for c in database_rulesets.response_list:
                ruleset = DxDatabaseRuleset(self.__engine)
                ruleset.from_ruleset(c)
                self.__rulesetList[c.database_ruleset_id] = ruleset
        else:
            if environment_id:
                self.__logger.error("No database ruleset found for "
                                    "environment name %s" % environment_name)
            else:
                self.__logger.error("No database ruleset found")

        # repeat the load for file rulesets using the same scope
        api_instance = self.__fileapi(self.__engine.api_client)
        if environment_id:
            file_rulesets = paginator(
                api_instance,
                "get_all_file_rulesets",
                environment_id=environment_id)
        else:
            file_rulesets = paginator(
                api_instance,
                "get_all_file_rulesets")

        if file_rulesets.response_list:
            for c in file_rulesets.response_list:
                ruleset = DxFileRuleset(self.__engine)
                ruleset.from_ruleset(c)
                self.__rulesetList[c.file_ruleset_id] = ruleset
        else:
            if environment_id:
                self.__logger.error("No file ruleset found for "
                                    "environment name %s" % environment_name)
            else:
                self.__logger.error("No file ruleset found")

    except self.__apiexc as e:
        print_error("Can't load ruleset %s" % e.body)
        return 1
def jobs_list_worker(p_engine, p_username, jobname, envname, p_format,
                     joblist_class):
    """
    Print list of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: username used to select engine credentials
    param3: jobname: job name to list
    param4: envname: environemnt name to list jobs from
    param5: p_format: output format
    param6: joblist_class - DxJobsList, DxProfileJobslist
    return 0 if environment found
    """
    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [
        ("Engine name", 30),
        ("Job name", 30),
        ("Ruleset name", 30),
        ("Connector name", 30),
        ("Environment name", 30),
        ("Completed", 20),
        ("Status", 20),
        ("Runtime", 20)
    ]
    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # unreachable engine - skip
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        joblist = globals()[joblist_class]()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        joblist.LoadJobs(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)
            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API
            # is not showing all types of connectors
            if rulesetobj is not None:
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"

            if jobobj.lastExec is not None:
                status = jobobj.lastExec.status
                # BUG FIX: original tested end_time twice; an execution
                # with end_time set but start_time missing crashed on
                # the subtraction below - now test start_time as well
                if (jobobj.lastExec.end_time is not None) and \
                   (jobobj.lastExec.start_time is not None):
                    endtime = jobobj.lastExec.end_time \
                        .strftime("%Y-%m-%d %H:%M:%S")
                    runtimetemp = jobobj.lastExec.end_time \
                        - jobobj.lastExec.start_time
                    runtime = str(runtimetemp)
                else:
                    endtime = 'N/A'
                    runtime = 'N/A'
            else:
                status = 'N/A'
                endtime = 'N/A'
                runtime = 'N/A'

            data.data_insert(
                engine_tuple[0],
                jobobj.job_name,
                rulename,
                connectorname,
                envobjname,
                endtime,
                status,
                runtime
            )

    print("")
    print (data.data_output(False))
    print("")
    return ret
def job_add(p_engine, p_username, params):
    """
    Add masking job to Masking engine
    param1: p_engine: engine name from configuration
    param2: p_username: username used to select engine credentials
    param3: params: job parameters
    return 0 if added, non 0 for error
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine, p_username)
    logger = logging.getLogger()
    envname = params['envname']
    jobname = params['jobname']
    rulesetname = params['rulesetname']

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # unreachable engine - skip
            continue
        joblist = DxJobsList()
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        rulesetlist = DxRulesetList(envname)
        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)
        job = DxJob(engine_obj, None)
        job.create_job(job_name=jobname, ruleset_id=rulesetref)
        # map Y/N command line flags to booleans for job level params
        for p in optional_params_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(job, p, value)
        # same mapping for database masking options
        dmo = DxDatabaseMaskingOptions()
        for p in optional_options_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(dmo, p, value)

        if params["on_the_fly_masking"] == 'Y':
            # on-the-fly jobs read from a separate source connector
            src_env = params["on_the_fly_src_envname"]
            src_con = params["on_the_fly_src_connector"]
            conlist = DxConnectorsList(src_env)
            conid = conlist.get_connectorId_by_name(src_con)
            if not conid:
                return 1
            on_the_fly_maskking_srcobj = DxOnTheFlyJob()
            # drop the first character of the connector id
            # (presumably a connector-type prefix - confirm)
            on_the_fly_maskking_srcobj.connector_id = conid[1:]

            conObj = conlist.get_by_ref(conid)
            if conObj.is_database:
                on_the_fly_maskking_srcobj.connector_type = "DATABASE"
            else:
                on_the_fly_maskking_srcobj.connector_type = "FILE"
            job.on_the_fly_masking_source = on_the_fly_maskking_srcobj

        # pre/post scripts: params hold already-opened file objects
        if params["prescript"]:
            scriptname = os.path.basename(params["prescript"].name)
            prescript = DxMaskingScriptJob(
                name=scriptname,
                contents=''.join(params["prescript"].readlines()))
            dmo.prescript = prescript

        if params["postscript"]:
            scriptname = os.path.basename(params["postscript"].name)
            postscript = DxMaskingScriptJob(
                name=scriptname,
                contents=''.join(params["postscript"].readlines()))
            dmo.postscript = postscript

        job.database_masking_options = dmo

        if joblist.add(job):
            ret = ret + 1

    return ret
def job_add(p_engine, params):
    """
    Add masking job to Masking engine
    param1: p_engine: engine name from configuration
    param2: params: job parameters
    return 0 if added, non 0 for error

    NOTE(review): this module defines job_add twice with different
    signatures; the later definition shadows the earlier one - confirm
    which variant should survive.
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    logger = logging.getLogger()
    envname = params['envname']
    jobname = params['jobname']
    rulesetname = params['rulesetname']

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # unreachable engine - skip
            continue
        joblist = DxJobsList()
        envlist = DxEnvironmentList()
        rulesetlist = DxRulesetList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        rulesetlist.LoadRulesets(envname)
        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)
        job = DxJob(engine_obj, None)
        job.ruleset_id = rulesetref
        job.job_name = jobname
        # map Y/N command line flags to booleans for job level params
        for p in optional_params_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(job, p, value)
        dmo = DatabaseMaskingOptions()
        for p in optional_options_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                # BUG FIX: options were set on job instead of dmo,
                # leaving DatabaseMaskingOptions unpopulated
                setattr(dmo, p, value)

        if params["prescript"]:
            prescript = MaskingJobScript()
            prescript.contents = ''.join(params["prescript"].readlines())
            prescript.name = params["prescript"].name
            dmo.prescript = prescript

        if params["postscript"]:
            postscript = MaskingJobScript()
            postscript.contents = ''.join(params["postscript"].readlines())
            postscript.name = params["postscript"].name
            dmo.postscript = postscript

        job.database_masking_options = dmo

        if joblist.add(job):
            ret = ret + 1

    return ret
def connector_list(p_engine, format, envname, connector_name, details):
    """
    Print list of connectors
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: envname: environemnt name filter for connectors
    param4: connector_name: connector name to list
    param5: details: print connector details
    return 0 if connector found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    data = DataFormatter()
    # wider header set when per-connector details were requested
    if details:
        data_header = [("Engine name", 30),
                       ("Environment name", 30),
                       ("Connector name", 30),
                       ("Connector type", 15),
                       ("Hostname", 30),
                       ("Port", 5),
                       ("Schema name", 30),
                       ("Type depended", 100)]
    else:
        data_header = [("Engine name", 30),
                       ("Environment name", 30),
                       ("Connector name", 30),
                       ("Connector type", 15)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        # connlist = DxConnectorsList()
        # envlist = DxEnvironmentList()
        # envlist.LoadEnvironments()
        # connlist.LoadConnectors(envname)
        # NOTE(review): the constructor call below appears to populate the
        # class-level list as a side effect; subsequent access is via
        # class-level methods - confirm against DxConnectorsList
        DxConnectorsList(envname)
        if connector_name is None:
            connectors = DxConnectorsList.get_allref()
        else:
            connectors = DxConnectorsList.get_all_connectorId_by_name(
                connector_name)
        if connectors is None:
            # connector not found on this engine - count as error, try next
            ret = ret + 1
            continue
        for connref in connectors:
            connobj = DxConnectorsList.get_by_ref(connref)
            if details:
                # flatten type-specific properties into one display column
                rest = ''.join([
                    '%s = %s ' % (key, value)
                    for (key, value) in
                    connobj.get_type_properties().items()
                ])
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name,
                    connobj.connector_type,
                    connobj.host,
                    connobj.port,
                    connobj.schema_name,
                    rest)
            else:
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name,
                    connobj.connector_type)
    print("")
    print(data.data_output(False))
    print("")
    return ret
def connector_add(p_engine, params):
    """
    Add application to Masking engine
    param1: p_engine: engine name from configuration
    param2: params: dict of parameters needed for connector to add
    return 0 if added, non 0 for error
    """
    ret = 0
    logger = logging.getLogger()
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    envname = params['envname']
    schemaName = params['schemaName']
    host = params['host']
    port = params['port']
    password = params['password']
    username = params['username']
    connname = params['connname']
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s" % envname)
        envref = envlist.get_environmentId_by_name(envname)
        if envref is None:
            ret = ret + 1
            continue
        connlist = DxConnectorsList()
        if params['type'] in database_types:
            # pick a database-type specific connector class; the generic
            # DxConnector needs database_type set explicitly
            if params['type'] == 'oracle':
                connobj = OracleConnector(engine_obj)
            elif params['type'] == 'mssql':
                connobj = MSSQLConnector(engine_obj)
            elif params['type'] == 'sybase':
                connobj = SybaseConnector(engine_obj)
            else:
                connobj = DxConnector(engine_obj)
                connobj.database_type = params['type'].upper()
            connobj.connector_name = connname
            connobj.schema_name = schemaName
            connobj.username = username
            connobj.password = password
            connobj.host = host
            if port:
                # `+ 0` forces a numeric port (fails fast on non-numbers)
                connobj.port = port + 0
            connobj.sid = params['sid']
            connobj.jdbc = params['jdbc']
            connobj.environment_id = envref
            connobj.instance_name = params['instancename']
            connobj.database_name = params['databasename']
        elif params['type'] in file_types:
            # file connectors carry their network settings in a separate
            # ConnectionInfo object
            path = params['path']
            connmode = params['servertype']
            connobj = DxFileConnector(engine_obj)
            connobj.is_database = False
            connobj.connector_name = connname
            connobj.environment_id = envref
            connobj.file_type = params['type'].upper()
            ci = ConnectionInfo()
            ci.host = host
            ci.port = port
            ci.login_name = username
            ci.password = password
            ci.path = path
            ci.connection_mode = connmode.upper()
            connobj.connection_info = ci
        else:
            print_error('Wrong connector type %s' % params['type'])
            logger.error('Wrong connector type %s' % params['type'])
            return 1
        if connlist.add(connobj):
            ret = ret + 1
    return ret
def column_worker(p_engine, sortby, rulesetname, envname, metaname,
                  columnname, filter_algname, filter_is_masked, algname,
                  is_masked, domainname, function_to_call, data=None,
                  inventory=None, **kwargs):
    """
    Select a column using all filter parameters
    and run action defined in function_to_call
    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: filter_algname: algorithm name to filter
    param8: filter_is_masked: is masked fileter
    param9: algname: new algorithm to set
    param10: is_masked: set masking False/True
    param11: domainname: new domain to set
    param12: function_to_call: function name to call
    param13: data: output object
    return 0 action is processed without issues
    """
    ret = 0
    logger = logging.getLogger()
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        connlist = DxConnectorsList(envname)
        metalist = DxMetaList()
        # narrow rulesets to the named one, or process all of them
        rulesetref_list = []
        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
            if ruleref:
                rulesetref_list.append(ruleref)
        else:
            rulesetref_list = rulelist.get_allref()
        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
            else:
                # connector filtered out by envname - no environment known
                envobj = None
            metalist.LoadMeta(ruleobj.ruleset_id)
            # narrow tables/files to the named one, or process all
            metasetref_list = []
            if metaname:
                metaref = metalist.get_MetadataId_by_name(metaname, 1)
                if metaref:
                    metasetref_list.append(metaref)
            else:
                metasetref_list = metalist.get_allref()
            for metaid in metasetref_list:
                metaobj = metalist.get_by_ref(metaid)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaid,
                                    is_masked=filter_is_masked)
                colsetref_list = []
                # optional out-parameter: caller-supplied list collecting
                # every column ref seen, regardless of filters
                colcount = kwargs.get("colcount")
                if colcount is not None:
                    colcount.extend(collist.get_allref())
                if columnname:
                    colref = collist.get_column_id_by_name(columnname)
                    logger.debug("Column ref with name %s : %s"
                                 % (columnname, colref))
                    if colref:
                        colsetref_list.append(colref)
                else:
                    colsetref_list = collist.get_allref()
                logger.debug("List of columns to process : %s"
                             % colsetref_list)
                if filter_algname:
                    # intersect name-filtered columns with the set carrying
                    # the requested algorithm
                    colsetref_masked = collist.get_column_id_by_algorithm(
                        filter_algname)
                    logger.debug("List of columns with algorithm %s : %s"
                                 % (filter_algname, colsetref_masked))
                    colsetref_list = list(
                        set(colsetref_list) & set(colsetref_masked))
                    logger.debug("Intersection with column name filter %s"
                                 % colsetref_masked)
                for colref in colsetref_list:
                    colobj = collist.get_by_ref(colref)
                    # dispatch to the worker function by name; the callee
                    # returns non-zero on failure and is accumulated in ret
                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        data=data,
                        engine=engine_tuple,
                        envobj=envobj,
                        ruleobj=ruleobj,
                        metaobj=metaobj,
                        colobj=colobj,
                        algname=algname,
                        is_masked=is_masked,
                        domainname=domainname,
                        inventory=inventory,
                        **kwargs)
    return ret
def ruleset_listmeta(p_engine, format, rulesetname, envname, metaname):
    """
    List tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environemnt name to display metadata from
    param5: metamame: name of table/file to display
    return 0 if added, non zero for error
    """
    ret = 0
    found = False
    data = DataFormatter()
    data_header = [
        ("Engine name", 30),
        ("Environent name", 30),
        ("Ruleset name", 30),
        ("Metadata type", 15),
        ("Metadata name", 32)
    ]
    data.create_header(data_header)
    data.format_type = format
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)
        if rulesetname:
            rulesetref_list = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            rulesetref_list = rulelist.get_allref()
            if rulesetref_list is None:
                continue
        metalist = DxMetaList()
        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                # connector filtered out by envname
                environment_name = 'N/A'
            metalist.LoadMeta(ruleobj.ruleset_id)
            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                # remember that the requested meta was found at least once
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue
            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                data.data_insert(
                    engine_tuple[0],
                    environment_name,
                    ruleobj.ruleset_name,
                    ruleobj.type,
                    metaobj.meta_name
                )
    print("")
    print(data.data_output(False))
    print("")
    # when a specific meta name was requested, success is "found anywhere";
    # otherwise return the accumulated error count
    if found:
        return 0
    else:
        if metaname:
            print_error("Table or file %s not found" % metaname)
        return ret
def tab_list_details(p_engine, p_format, rulesetname, envname, metaname, what):
    """
    List details of tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environemnt name to display metadata from
    param5: metamame: name of table/file to display
    param6: what - Database/File
    return 0 if added, non zero for error
    """
    ret = 0
    found = False
    data = DataFormatter()
    # header depends on ruleset kind: table details vs file details
    if what == 'Database':
        data_header = [("Engine name", 30),
                       ("Environent name", 30),
                       ("Ruleset name", 30),
                       ("Table name", 32),
                       ("Logical key", 32),
                       ("Where clause", 50),
                       ("Custom SQL", 50)]
    else:
        data_header = [("Engine name", 30),
                       ("Environent name", 30),
                       ("Ruleset name", 30),
                       ("File name", 32),
                       ("File type", 32),
                       ("File format name", 32),
                       ("Delimiter", 10),
                       ("End of record", 10)]
    data.create_header(data_header)
    data.format_type = p_format
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)
        if rulesetname:
            rulesetref_all = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_all is None:
                ret = ret + 1
                continue
            # keep only rulesets matching the requested kind (Database/File)
            rulesetref_list = [
                x for x in rulesetref_all
                if rulelist.get_by_ref(x).type == what
            ]
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            if what == 'Database':
                rulesetref_list = rulelist.get_all_database_rulesetIds()
                if rulesetref_list is None:
                    continue
            else:
                rulesetref_list = rulelist.get_all_file_rulesetIds()
                if rulesetref_list is None:
                    continue
        filetypelist = DxFileFormatList()
        metalist = DxMetaList()
        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                # connector filtered out by envname
                environment_name = 'N/A'
            metalist.LoadMeta(ruleobj.ruleset_id)
            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue
            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                if what == 'Database':
                    data.data_insert(engine_tuple[0],
                                     environment_name,
                                     ruleobj.ruleset_name,
                                     metaobj.meta_name,
                                     metaobj.key_column,
                                     repr(metaobj.where_clause),
                                     repr(metaobj.custom_sql))
                else:
                    # resolve file format id into its display name
                    if metaobj.file_format_id is not None:
                        fileformatobj = filetypelist.get_by_ref(
                            metaobj.file_format_id)
                        fileformatname = fileformatobj.file_format_name
                    else:
                        fileformatname = 'N/A'
                    data.data_insert(engine_tuple[0],
                                     environment_name,
                                     ruleobj.ruleset_name,
                                     metaobj.meta_name,
                                     metaobj.file_type,
                                     fileformatname,
                                     metaobj.delimiter,
                                     repr(metaobj.end_of_record))
    print("")
    print(data.data_output(False))
    print("")
    if found:
        return 0
    else:
        if metaname:
            print_error("Table %s not found" % metaname)
        return ret
def column_batch(p_engine, rulesetname, envname, inputfile, inventory):
    """
    Update all columns defined in file
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: envname: environment name
    param4: inputfile: file handler with entries
    return 0 if all rows processed without issues
    """
    ret = 0
    logger = logging.getLogger()
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    # inventory files use GUI algorithm names; map them to API names
    if inventory is True:
        mapping = algname_mapping_import()
    else:
        mapping = None
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        metalist = DxMetaList()
        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1
        metalist.LoadMeta(ruleobj.ruleset_id)
        # cache of DxColumnList per meta ref so columns are loaded from the
        # engine only once per table/file
        metacolumn_list = {}
        for line in inputfile:
            if line.startswith('#'):
                # skip comment lines
                continue
            try:
                logger.debug("readling line %s" % line)
                # CSV layout differs by ruleset type and by whether the file
                # is a CLI export or a GUI inventory export
                if inventory is False:
                    if ruleobj.type == "Database":
                        (metaname, column_role, parent_column, column_name,
                         type, domain_name, algname, is_masked_YN, idmethod,
                         rowtype, dateformat) \
                            = line.strip().split(',')
                    else:
                        (metaname, column_name, domain_name, algname,
                         is_masked_YN, priority, recordtype, position,
                         length, dateformat) = line.strip().split(',')
                else:
                    if ruleobj.type == "Database":
                        (env, ruleset, metaname, column_role, parent_column,
                         column_name, type, domain_name, algname,
                         is_masked_YN, idmethod, rowtype, dateformat) \
                            = line.strip().split(',')
                    else:
                        (env, ruleset, metaname, column_name, domain_name,
                         algname, is_masked_YN, priority, recordtype,
                         position, length,
                         dateformat) = line.strip().split(',')
            except ValueError as e:
                # NOTE(review): matches the exception text to distinguish
                # "too many fields" (probably an inventory file without
                # --inventory) from "too few fields"; the exact message is
                # interpreter-dependent - verify on the supported Pythons
                if str(e) == "too many values to unpack":
                    logger.error("to few values in inputfile - maybe add "
                                 "--inventory if you are loading an inventory"
                                 "file from GUI")
                    print_error("to few values in inputfile - maybe add "
                                "--inventory if you are loading an inventory"
                                "file from GUI")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break
                else:
                    logger.error("not all columns in file have value")
                    print_error("not all columns in file have value")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break
            metaref = metalist.get_MetadataId_by_name(metaname)
            if metaref is None:
                ret = ret + 1
                continue
            metaobj = metalist.get_by_ref(metaref)
            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s " % metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist
            colref = metacolumn_list[metaref].get_column_id_by_name(
                column_name)
            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                # accept both CLI ('Y') and GUI ('true') flag spellings
                if is_masked_YN == 'Y' or is_masked_YN == 'true':
                    is_masked = True
                else:
                    is_masked = False
                # '' and '-' are placeholders for "no algorithm/domain"
                if algname == '' or algname == '-':
                    algname = 'None'
                if domain_name == '' or domain_name == '-':
                    domain_name = 'None'
                if ruleobj.type == "Database":
                    if idmethod == 'Auto':
                        colobj.is_profiler_writable = True
                    elif idmethod == 'User':
                        colobj.is_profiler_writable = False
                    else:
                        print_error("Wrong id method")
                        return 1
                if dateformat == '-':
                    colobj.date_format = None
                else:
                    colobj.date_format = dateformat
                if mapping is not None and algname != 'None':
                    try:
                        algname = mapping[algname]
                    except KeyError as e:
                        logger.debug("Wrong algoritm name in input file"
                                     ". Not an inventory file ?")
                        logger.debug(str(e))
                        print_error("Wrong algoritm name in input file"
                                    ". Not an inventory file ?")
                        return 1
                ret = ret + update_algorithm(colobj=colobj,
                                             algname=algname,
                                             domainname=domain_name,
                                             metaobj=metaobj,
                                             ruleobj=ruleobj,
                                             is_masked=is_masked)
            else:
                # column named in the file does not exist in the ruleset
                ret = ret + 1
                continue
    return ret
def sync_worker(p_engine, objecttype, objectname, envname,
                function_to_call, **kwargs):
    """
    Run an action for list of syncable objects
    param1: p_engine: engine name from configuration
    param2: objecttype: objecttype to list, all if None
    param3: objectname: objectname to list_table_details
    param4: function_to_call
    return 0 if objecttype found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    # objectname = "RandomValueLookup"
    # objectname = None
    ret = 0
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        synclist = DxSyncList(objecttype)
        # --- algorithms: global objects, only processed without env filter
        if (objecttype is None or objecttype == "algorithm") \
                and envname is None:
            if objectname:
                alglist = [objectname]
            else:
                alglist = synclist.get_all_algorithms()
            for syncref in alglist:
                syncobj = synclist.get_object_by_type_name(
                    "algorithm", syncref)
                if syncobj:
                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname='global',
                        name=syncref,
                        **kwargs)
        # --- database / file connectors
        if objecttype is None or objecttype == "database_connector" \
                or objecttype == "file_connector":
            envlist = DxEnvironmentList()
            connlist = DxConnectorsList(envname)
            if objecttype is None:
                objtypelist = ["database_connector", "file_connector"]
            else:
                objtypelist = [objecttype]
            for objtype in objtypelist:
                if objectname:
                    connbynameref = connlist.get_connectorId_by_name(
                        objectname, False)
                    if connbynameref:
                        # connector refs are prefixed ('d'/'f'); sync list
                        # keys use the bare numeric id
                        syncconnref = int(connbynameref[1:])
                        if synclist.get_object_by_type_name(
                                objtype, syncconnref):
                            connrefs = [syncconnref]
                        else:
                            connrefs = []
                    else:
                        connrefs = []
                else:
                    connrefs = synclist.get_all_object_by_type(objtype)
                for syncref in connrefs:
                    syncobj = synclist.get_object_by_type_name(
                        objtype, syncref)
                    if syncobj.object_type == 'DATABASE_CONNECTOR':
                        connobj = connlist.get_by_ref("d" + str(syncref))
                    else:
                        connobj = connlist.get_by_ref("f" + str(syncref))
                    if connobj is None:
                        # limited by env
                        continue
                    envobj = envlist.get_by_ref(connobj.environment_id)
                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname=envobj.environment_name,
                        name=connobj.connector_name,
                        **kwargs)
        # --- database / file rulesets
        if objecttype is None or objecttype == "database_ruleset" \
                or objecttype == "file_ruleset":
            envlist = DxEnvironmentList()
            connlist = DxConnectorsList(envname)
            rulesetList = DxRulesetList(envname)
            if objecttype is None:
                objtypelist = ["database_ruleset", "file_ruleset"]
            else:
                objtypelist = [objecttype]
            for objtype in objtypelist:
                if objectname:
                    rulesetrefs = []
                    rulesetref = rulesetList.get_all_rulesetId_by_name(
                        objectname)
                    if rulesetref:
                        for rsref in rulesetref:
                            if synclist.get_object_by_type_name(
                                    objtype, rsref):
                                rulesetrefs.append(rsref)
                            else:
                                rulesetrefs = []
                    else:
                        rulesetrefs = []
                else:
                    rulesetrefs = synclist.get_all_object_by_type(objtype)
                for syncref in rulesetrefs:
                    syncobj = synclist.get_object_by_type_name(objtype,
                                                               syncref)
                    rulesetobj = rulesetList.get_by_ref(syncref)
                    if rulesetobj is None:
                        # limited by env
                        continue
                    connobj = connlist.get_by_ref(rulesetobj.connectorId)
                    envobj = envlist.get_by_ref(connobj.environment_id)
                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname=envobj.environment_name,
                        name=rulesetobj.ruleset_name,
                        **kwargs)
        # --- global objects / keys / domains: engine-global, only without
        # env filter
        if (objecttype is None or objecttype == "global_object"
                or objecttype == "key" or objecttype == "domain") \
                and envname is None:
            if objecttype is None:
                objtypelist = ["global_object", "key", "domain"]
            else:
                objtypelist = [objecttype]
            for objtype in objtypelist:
                if objectname:
                    objlist = [objectname]
                else:
                    objlist = synclist.get_all_object_by_type(objtype)
                for syncref in objlist:
                    syncobj = synclist.get_object_by_type_name(objtype,
                                                               syncref)
                    if syncobj:
                        dynfunc = globals()[function_to_call]
                        ret = ret + dynfunc(
                            object=syncobj,
                            engine_obj=engine_obj,
                            envname='global',
                            name=syncref,
                            **kwargs)
        # --- masking jobs
        if objecttype is None or objecttype == "masking_job":
            envlist = DxEnvironmentList()
            joblist = DxJobsList()
            joblist.LoadJobs(envname)
            connlist = DxConnectorsList(envname)
            rulesetlist = DxRulesetList(envname)
            if objectname:
                jobref = joblist.get_jobId_by_name(objectname)
                if synclist.get_object_by_type_name("masking_job", jobref):
                    jobrefs = [jobref]
                else:
                    jobrefs = []
            else:
                jobrefs = synclist.get_all_object_by_type("masking_job")
            for syncref in jobrefs:
                syncobj = synclist.get_object_by_type_name("masking_job",
                                                           syncref)
                jobobj = joblist.get_by_ref(syncref)
                if envname and jobobj is None:
                    # limited by env
                    continue
                rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
                connectorobj = connlist.get_by_ref(rulesetobj.connectorId)
                envobj = envlist.get_by_ref(connectorobj.environment_id)
                dynfunc = globals()[function_to_call]
                ret = ret + dynfunc(
                    object=syncobj,
                    engine_obj=engine_obj,
                    envname=envobj.environment_name,
                    name=jobobj.job_name,
                    **kwargs)
    return ret
def LoadConnectors(self, environment_name):
    """
    Load all connectors
    :param1 environment_name: Limit load to particular environment name
    Returns None on success (including the cached no-op case), 1 on error.
    """
    self.__logger.debug("load connector !!!")
    if self.__loaded_engine is None:
        self.__loaded_engine = self.__engine.get_name()
    # cache hit: same engine, list already populated, same env filter
    if self.__loaded_engine == self.__engine.get_name() \
            and self.__connectorsList != {} \
            and self.__loaded_env == environment_name:
        return None
    else:
        # delete a list as we can have multi engines
        self.__connectorsList.clear()
        self.__loaded_engine = self.__engine.get_name()
    # pick the API bindings matching the engine version
    if (self.__engine.version_ge('6.0.0')):
        from masking_api_60.api.database_connector_api \
            import DatabaseConnectorApi
        from masking_api_60.api.file_connector_api import FileConnectorApi
        from masking_api_60.rest import ApiException
    else:
        from masking_api_53.api.database_connector_api \
            import DatabaseConnectorApi
        from masking_api_53.api.file_connector_api import FileConnectorApi
        from masking_api_53.rest import ApiException
    self.__api = DatabaseConnectorApi
    self.__fileapi = FileConnectorApi
    self.__loaded_env = environment_name
    self.__apiexc = ApiException
    try:
        api_instance = self.__api(self.__engine.api_client)
        if environment_name:
            environment_id = DxEnvironmentList.get_environmentId_by_name(
                environment_name)
            if environment_id:
                dbconnectors = paginator(
                    api_instance,
                    "get_all_database_connectors",
                    environment_id=environment_id,
                    _request_timeout=self.__engine.get_timeout())
            else:
                # environment name given but not found
                return 1
        else:
            environment_id = None
            dbconnectors = paginator(
                api_instance,
                "get_all_database_connectors",
                _request_timeout=self.__engine.get_timeout())
        if dbconnectors.response_list:
            for c in dbconnectors.response_list:
                # wrap each connector in its type-specific class
                if (c.database_type == 'ORACLE'):
                    connector = OracleConnector(self.__engine)
                elif (c.database_type == 'MSSQL'):
                    connector = MSSQLConnector(self.__engine)
                elif (c.database_type == 'SYBASE'):
                    connector = SybaseConnector(self.__engine)
                else:
                    connector = DxConnector(self.__engine)
                connector.from_connector(c)
                connector.is_database = True
                # database connector refs are prefixed with 'd'
                self.__connectorsList['d' + str(c.database_connector_id)] \
                    = connector
        else:
            self.__logger.debug("No database connectors found")
        api_instance = self.__fileapi(self.__engine.api_client)
        if environment_id:
            file_connectors = paginator(
                api_instance,
                "get_all_file_connectors",
                environment_id=environment_id,
                _request_timeout=self.__engine.get_timeout())
        else:
            file_connectors = paginator(
                api_instance,
                "get_all_file_connectors",
                _request_timeout=self.__engine.get_timeout())
        if file_connectors.response_list:
            for f in file_connectors.response_list:
                connector = DxFileConnector(self.__engine)
                connector.from_connector(f)
                connector.is_database = False
                # file connector refs are prefixed with 'f'
                self.__connectorsList['f' + str(f.file_connector_id)] \
                    = connector
        else:
            self.__logger.debug("No file connectors found")
        if len(self.__connectorsList) < 1:
            print_error("No connectors found")
            self.__logger.error("No connectors found")
            return 1
        return None
    except self.__apiexc as e:
        print_error(e.body)
        self.__logger.error(e.body)
        return 1
def tab_selector(p_engine, rulesetname, envname, metaname, function_to_call,
                 params):
    """
    List details of tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environemnt name to display metadata from
    param5: metamame: name of table/file to display
    param6: what - Database/File
    return 0 if added, non zero for error
    """
    ret = 0
    update = False
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            # no session to this engine - skip it
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        else:
            ruleref = None
        metalist = DxMetaList()
        metalist.LoadMeta(ruleset_id=ruleref)
        metaref = metalist.get_MetadataId_by_name(metaname)
        if metaref:
            metaobj = metalist.get_by_ref(metaref)
        else:
            ret = ret + 1
            continue
        # CLI option name -> metadata object attribute name
        param_map = {
            "custom_sql": "custom_sql",
            "where_clause": "where_clause",
            "having_clause": "having_clause",
            "key_column": "key_column",
            "file_format": "file_format_id",
            "file_delimiter": "delimiter",
            "file_eor": "end_of_record",
            "file_enclosure": "enclosure",
            "file_name_regex": "name_is_regular_expression"
        }
        # 'custom' end-of-record requires the explicit custom value
        eor = params["file_eor"]
        if eor == 'custom':
            if params["file_eor_custom"]:
                params["file_eor"] = params["file_eor_custom"]
            else:
                print_error("Custom End of record is unknown")
                return 1
        for p in param_map.keys():
            if params[p]:
                # apply only options the metadata object actually supports
                # (table vs file metadata expose different attributes)
                if hasattr(metaobj, param_map[p]):
                    update = True
                    value = params[p]
                    if value == '':
                        value = None
                    setattr(metaobj, param_map[p], value)
        if update:
            ret = ret + metaobj.update()
    return ret