def do_print_meta(**kwargs):
    """
    Print the list of tables/files visible through a connector.
    Keyword args: connref (connector reference), connlist (connector list),
    engine_obj (masking engine), format (output format).
    Return 0 on success, 1 when the connector reports no metadata.
    """
    connref = kwargs.get('connref')
    connlist = kwargs.get('connlist')
    engine_obj = kwargs.get('engine_obj')
    format = kwargs.get('format')
    connobj = connlist.get_by_ref(connref)
    # database connectors expose tables, file connectors expose files
    metaname = 'Table name' if connobj.is_database else 'File name'
    formatter = DataFormatter()
    formatter.create_header([("Engine name", 30),
                             ("Connector name", 30),
                             (metaname, 40)])
    formatter.format_type = format
    metalist = connobj.fetch_meta()
    if not metalist:
        print_error("List of tables/files is empty")
        return 1
    for meta_item in metalist:
        formatter.data_insert(engine_obj.get_name(),
                              connobj.connector_name,
                              meta_item)
    print("")
    print(formatter.data_output(False))
    print("")
    return 0
def engine_list(p_engine, p_username, p_format):
    """
    List Masking engines from configuration file
    param1: p_engine: name of Masking engine
    param2: p_username: username
    param3: p_format: output format
    return None if OK or integer with error, ex. no rows found
    """
    formatter = DataFormatter()
    formatter.create_header([
        ("Engine name", 30),
        ("IP", 30),
        ("username", 30),
        ("protocol", 8),
        ("port", 5),
        ("default", 7),
        ("proxy URL", 30),
        ("proxy user", 30),
    ])
    formatter.format_type = p_format
    config = DxConfig()
    config.init_metadata()
    # no engine name means every configured engine
    engine_filter = 'all' if p_engine is None else p_engine
    rows = config.get_engine_info(engine_filter, p_username)
    if rows is None:
        return -1
    # columns 3 and 7 of the config row are intentionally skipped
    for row in rows:
        formatter.data_insert(row[0], row[1], row[2], row[4],
                              row[5], row[6], row[8], row[9])
    print("")
    print(formatter.data_output(False))
    print("")
    return None
def driver_list(p_engine, p_username, format, driver_name):
    """
    Print list of JDBC drivers
    param1: p_engine: engine name from configuration
    param2: p_username: username
    param3: format: output format
    param4: driver_name: driver name to list, all if None
    return 0 if drivers found, non-zero on error
    """
    ret = 0
    logger = logging.getLogger()
    logger.debug("driver_name {}".format(driver_name))
    enginelist = get_list_of_engines(p_engine, p_username)
    if enginelist is None:
        return 1
    data = DataFormatter()
    data_header = [("Engine name", 30),
                   ("Driver name", 30),
                   ("Driver class name", 50),
                   ("Built-in", 10)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        # JDBC driver API requires engine version 5.3.9 or newer
        if not feature_support(engine_obj, "5.3.9"):
            ret = ret + 1
            continue
        driver_list = DxJDBCList()
        # load all objects
        if driver_name:
            drivers = driver_list.get_all_driver_id_by_name(driver_name)
            if drivers is None:
                # driver not found on this engine; skip instead of
                # iterating over None (raised TypeError before this fix)
                ret = ret + 1
                continue
        else:
            drivers = driver_list.get_allref()
        for driver_ref in drivers:
            driver_obj = driver_list.get_by_ref(driver_ref)
            builtin = "True" if driver_obj.built_in else "False"
            data.data_insert(engine_tuple[0],
                             driver_obj.driver_name,
                             driver_obj.driver_class_name,
                             builtin)
    print("")
    print(data.data_output(False))
    print("")
    return ret
def algorithm_list(p_engine, algname):
    """
    Print list of algorithms
    param1: p_engine: engine name from configuration
    param2: algname: algname name to list, all if None
    return 0 if algname found
    """
    ret = 0
    data = DataFormatter()
    data_header = [("Engine name", 30),
                   ("Algorithm name", 30),
                   ("Domain name", 32),
                   ("Syncable", 3)]
    data.create_header(data_header)
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue
        alglist = DxAlgorithmList()
        alglist.LoadAlgorithms()
        algref_list = []
        if algname:
            algobj = alglist.get_by_ref(algname)
            if algobj:
                algref_list.append(algobj.algorithm_name)
        else:
            algref_list = alglist.get_allref()
        for algref in algref_list:
            algobj = alglist.get_by_ref(algref)
            syncable = 'Y' if algobj.sync else 'N'
            # NOTE(review): 'domain' is a literal placeholder printed in
            # the "Domain name" column, not the algorithm's real domain -
            # confirm intended output
            data.data_insert(engine_tuple[0], algobj.algorithm_name,
                             'domain', syncable)
            # removed algobj.export() - exporting every algorithm to disk
            # is a side effect that does not belong in a list operation
    print("")
    print(data.data_output(False))
    print("")
    return ret
def domain_list(p_engine, format, domainname):
    """
    Print list of algorithms
    param1: p_engine: engine name from configuration
    param2: domainname: domain name to list, all if None
    return 0 if domain name found
    """
    ret = 0
    logger = logging.getLogger()
    formatter = DataFormatter()
    formatter.format_type = format
    formatter.create_header([
        ("Engine name", 30),
        ("Domain name", 32)
    ])
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        domainlist = DxDomainList()
        if domainname:
            domobj = domainlist.get_by_ref(domainname)
            if not domobj:
                print_error("Domain {} not found".format(domainname))
                return 1
            domain_refs = [domobj.domain_name]
        else:
            domain_refs = domainlist.get_allref()
        for domainref in domain_refs:
            domobj = domainlist.get_by_ref(domainref)
            formatter.data_insert(engine_tuple[0], domobj.domain_name)
    print("")
    print(formatter.data_output(False))
    print("")
    return ret
def environment_list(p_engine, format, envname):
    """
    Print list of environments
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: envname: environemnt name to list, all if None
    return 0 if environment found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    formatter = DataFormatter()
    formatter.create_header([
        ("Engine name", 30),
        ("Environment name", 30),
        ("Application name", 30)
    ])
    formatter.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        # load all objects
        envlist.LoadEnvironments()
        if envname is None:
            environments = envlist.get_allref()
        else:
            single = envlist.get_environmentId_by_name(envname)
            if single is None:
                ret = ret + 1
                continue
            environments = [single]
        for envref in environments:
            envobj = envlist.get_by_ref(envref)
            formatter.data_insert(engine_tuple[0],
                                  envobj.environment_name,
                                  envobj.application)
    print("")
    print(formatter.data_output(False))
    print("")
    return ret
def application_list(p_engine, format, appname):
    """
    Print list of applications
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: appname: application name to list, all if None
    return 0 if application found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    formatter = DataFormatter()
    formatter.create_header([
        ("Engine name", 30),
        ("Application name", 30),
    ])
    formatter.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        applist = DxApplicationList()
        # load all objects
        applist.LoadApplications()
        if appname is None:
            applications = applist.get_allref()
        else:
            applications = applist.get_applicationId_by_name(appname)
            if len(applications) == 0:
                # nothing matched on this engine; loop below is a no-op
                ret = ret + 1
        for appref in applications:
            appobj = applist.get_by_ref(appref)
            formatter.data_insert(engine_tuple[0], appobj.application_name)
    print("")
    print(formatter.data_output(False))
    print("")
    return ret
def role_list(p_engine, p_username, format, rolename):
    """
    Print list of roles
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rolename: role name to list, all if None
    return 0 if role found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine, p_username)
    if enginelist is None:
        return 1
    formatter = DataFormatter()
    formatter.create_header([("Engine name", 30), ("Role name", 30)])
    formatter.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        rolelist = DxRoleList()
        # load all objects
        if rolename is None:
            roles = rolelist.get_allref()
        else:
            single = rolelist.get_roleId_by_name(rolename)
            if single is None:
                ret = ret + 1
                continue
            roles = [single]
        for roleref in roles:
            roleobj = rolelist.get_by_ref(roleref)
            formatter.data_insert(engine_tuple[0], roleobj.role_name)
    print("")
    print(formatter.data_output(False))
    print("")
    return ret
def user_list(p_engine, format, username):
    """
    Print list of users
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: username: user name to list, all if None
    return 0 if role found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    data = DataFormatter()
    data_header = [("Engine name", 30),
                   ("User name", 30),
                   ("First name", 30),
                   ("Last name", 30),
                   ("E-mail", 30),
                   ("Auth type", 10),
                   ("Principal", 30),
                   ("Role name", 30),
                   ("Locked", 6),
                   ("Environment list", 30)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        userlist = DxUserList()
        rolelist = DxRoleList()
        envlist = DxEnvironmentList()
        # check if ldap is configured
        appsettingslist = DxAppSettingList()
        ldapobj = appsettingslist.get_appSetting_by_group_and_name(
            'ldap', 'Enable')
        if ldapobj.setting_value == 'true':
            msadobj = appsettingslist.get_appSetting_by_group_and_name(
                'ldap', 'MsadDomain')
        else:
            msadobj = None
        if username is None:
            users = userlist.get_allref()
        else:
            user = userlist.get_userId_by_name(username)
            if user is None:
                ret = ret + 1
                continue
            users = [user]
        for userref in users:
            userobj = userlist.get_by_ref(userref)
            if userobj.is_admin:
                rolename = 'Administrator'
            elif userobj.non_admin_properties is not None:
                roleobj = rolelist.get_by_ref(
                    userobj.non_admin_properties.role_id)
                if roleobj is not None:
                    rolename = roleobj.role_name
                else:
                    rolename = 'Not Found'
            else:
                # bugfix: non-admin user without properties previously
                # dereferenced None and raised AttributeError
                rolename = 'Not Found'
            if userobj.non_admin_properties is not None:
                if userobj.non_admin_properties.environment_ids:
                    envs = ';'.join([
                        envlist.get_by_ref(x).environment_name
                        for x in userobj.non_admin_properties.environment_ids
                    ])
                else:
                    envs = ''
            else:
                envs = ''
            if userobj.is_locked:
                locked = 'Locked'
            else:
                locked = 'Open'
            # with LDAP enabled, principal is user@MsadDomain
            if msadobj is None:
                authtype = 'NATIVE'
                principal = ''
            else:
                authtype = 'LDAP'
                principal = '{}@{}'.format(userobj.user_name,
                                           msadobj.setting_value)
            data.data_insert(engine_tuple[0], userobj.user_name,
                             userobj.first_name, userobj.last_name,
                             userobj.email, authtype, principal,
                             rolename, locked, envs)
    print("")
    print(data.data_output(False))
    print("")
    return ret
def _exec_time_strings(start_time, end_time):
    """
    Format execution timestamps for display.
    Return a (starttime, endtime, runtime) tuple of strings;
    missing timestamps are rendered as 'N/A'.
    """
    if start_time is not None:
        starttime = start_time.strftime("%Y-%m-%d %H:%M:%S")
    else:
        starttime = 'N/A'
    if (end_time is not None) and (start_time is not None):
        endtime = end_time.strftime("%Y-%m-%d %H:%M:%S")
        runtime = str(end_time - start_time)
    else:
        endtime = 'N/A'
        runtime = 'N/A'
    return (starttime, endtime, runtime)


def jobs_report_worker(p_engine, p_username, jobname, envname, p_format, last,
                       startdate, enddate, details, jobtype='masking'):
    """
    Print report of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: username
    param3: jobname: job name to list
    param4: envname: environemnt name to list jobs from
    param5: p_format: output format
    param6: last: display last job only
    param7: startdate: filter by start date
    param8: enddate: filter by end date
    param9: details: display per-component details
    param10: jobtype: 'masking' for masking jobs, otherwise profile jobs
    return 0 if environment found
    """
    ret = 0
    logger = logging.getLogger()
    enginelist = get_list_of_engines(p_engine, p_username)
    if enginelist is None:
        return 1
    data = DataFormatter()
    if jobtype == 'masking':
        if details:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("ExecId", 6),
                ("Meta name", 12),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
        else:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("Job Id", 6),
                ("Min Memory", 10),
                ("Max Memory", 10),
                ("Streams", 7),
                ("On The Fly", 10),
                ("Ruleset Type", 12),
                ("ExecId", 6),
                ("Total Rows", 10),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
    else:
        data_header = [
            ("Engine name", 30),
            ("Environment name", 30),
            ("Job name", 30),
            ("Ruleset Type", 12),
            ("ExecId", 6),
            ("Started", 20),
            ("Completed", 20),
            ("Status", 20),
            ("Runtime", 20)
        ]
    data.create_header(data_header)
    data.format_type = p_format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        if jobtype == 'masking':
            joblist = DxJobsList()
        else:
            joblist = DxProfileJobsList()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        joblist.LoadJobs(envname)
        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue
        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)
            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # those test are requierd for 5.X engies where API
            # is not showing all types of connectors
            if rulesetobj is not None:
                ruleset_type = rulesetobj.type
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(
                    rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"
                ruleset_type = "N/A"
            if last:
                execlist = [jobobj.lastExec]
            elif startdate or enddate:
                execlist = jobobj.filter_executions(startdate, enddate)
            else:
                execlist = jobobj.execList
            if not execlist:
                # no executions for this job
                ret = 1
                continue
            for jobexec in execlist:
                if not details:
                    if jobexec is not None:
                        status = jobexec.status
                        execid = jobexec.execution_id
                        (starttime, endtime, runtime) = _exec_time_strings(
                            jobexec.start_time, jobexec.end_time)
                    else:
                        status = 'N/A'
                        starttime = 'N/A'
                        endtime = 'N/A'
                        runtime = 'N/A'
                        execid = 'N/A'
                    if jobtype == 'masking':
                        if jobexec is not None:
                            rowsmasked = jobexec.rows_masked
                            rowstotal = jobexec.rows_total
                        else:
                            rowsmasked = 'N/A'
                            rowstotal = 'N/A'
                        data.data_insert(
                            engine_tuple[0], envobjname, jobobj.job_name,
                            jobobj.masking_job_id, jobobj.min_memory,
                            jobobj.max_memory, jobobj.num_input_streams,
                            jobobj.on_the_fly_masking, ruleset_type,
                            execid, rowstotal, rowsmasked, starttime,
                            endtime, status, runtime)
                    else:
                        data.data_insert(
                            engine_tuple[0], envobjname, jobobj.job_name,
                            ruleset_type, execid, starttime, endtime,
                            status, runtime)
                else:
                    # details: one row per execution component
                    if jobexec is not None:
                        execid = jobexec.execution_id
                        complist = jobobj.list_execution_component(execid)
                        if complist is not None:
                            for comp in complist:
                                status = comp.status
                                rowsmasked = comp.rows_masked
                                metaname = comp.component_name
                                (starttime, endtime,
                                 runtime) = _exec_time_strings(
                                    comp.start_time, comp.end_time)
                                data.data_insert(
                                    engine_tuple[0], envobjname,
                                    jobobj.job_name, execid, metaname,
                                    rowsmasked, starttime, endtime,
                                    status, runtime)
                        else:
                            # removed leftover debug print("setting 1")
                            ret = 1
                    else:
                        ret = 1
    print("")
    print(data.data_output(False))
    print("")
    return ret
def jobs_list_worker(p_engine, p_username, jobname, envname, p_format,
                     joblist_class):
    """
    Print list of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: username
    param3: jobname: job name to list
    param4: envname: environemnt name to list jobs from
    param5: p_format: output format
    param6: joblist_class - DxJobsList, DxProfileJobsList
    return 0 if environment found
    """
    ret = 0
    logger = logging.getLogger()
    enginelist = get_list_of_engines(p_engine, p_username)
    if enginelist is None:
        return 1
    data = DataFormatter()
    data_header = [
        ("Engine name", 30),
        ("Job name", 30),
        ("Ruleset name", 30),
        ("Connector name", 30),
        ("Environment name", 30),
        ("Completed", 20),
        ("Status", 20),
        ("Runtime", 20)
    ]
    data.create_header(data_header)
    data.format_type = p_format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        # joblist_class names the list class to instantiate
        joblist = globals()[joblist_class]()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        joblist.LoadJobs(envname)
        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue
        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)
            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # those test are requierd for 5.X engies where API
            # is not showing all types of connectors
            if rulesetobj is not None:
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(
                    rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"
            if jobobj.lastExec is not None:
                status = jobobj.lastExec.status
                # bugfix: the second condition used to re-test end_time;
                # start_time must be checked before computing the runtime
                # delta, otherwise None - raises TypeError
                if (jobobj.lastExec.end_time is not None) and \
                   (jobobj.lastExec.start_time is not None):
                    endtime = jobobj.lastExec.end_time \
                        .strftime("%Y-%m-%d %H:%M:%S")
                    runtimetemp = jobobj.lastExec.end_time \
                        - jobobj.lastExec.start_time
                    runtime = str(runtimetemp)
                else:
                    endtime = 'N/A'
                    runtime = 'N/A'
            else:
                status = 'N/A'
                endtime = 'N/A'
                runtime = 'N/A'
            data.data_insert(
                engine_tuple[0], jobobj.job_name, rulename, connectorname,
                envobjname, endtime, status, runtime)
    print("")
    print(data.data_output(False))
    print("")
    return ret
def connector_list(p_engine, format, envname, connector_name, details):
    """
    Print list of connectors
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: envname: environemnt name filter for connectors
    param4: connector_name: connector name to list
    param5: details: print connector details
    return 0 if connector found
    """
    ret = 0
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    formatter = DataFormatter()
    if details:
        header = [("Engine name", 30),
                  ("Environment name", 30),
                  ("Connector name", 30),
                  ("Connector type", 15),
                  ("Hostname", 30),
                  ("Port", 5),
                  ("Schema name", 30),
                  ("Type depended", 100)]
    else:
        header = [("Engine name", 30),
                  ("Environment name", 30),
                  ("Connector name", 30),
                  ("Connector type", 15)]
    formatter.create_header(header)
    formatter.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        # instantiating DxConnectorsList presumably loads the shared
        # state read by the class-level accessors below - verify
        DxConnectorsList(envname)
        if connector_name is None:
            connectors = DxConnectorsList.get_allref()
        else:
            connectors = DxConnectorsList.get_all_connectorId_by_name(
                connector_name)
            if connectors is None:
                ret = ret + 1
                continue
        for connref in connectors:
            connobj = DxConnectorsList.get_by_ref(connref)
            environment_name = DxEnvironmentList.get_by_ref(
                connobj.environment_id).environment_name
            if details:
                props = connobj.get_type_properties()
                rest = ''.join(['%s = %s ' % (key, value)
                                for (key, value) in props.items()])
                formatter.data_insert(
                    engine_tuple[0], environment_name,
                    connobj.connector_name, connobj.connector_type,
                    connobj.host, connobj.port, connobj.schema_name, rest)
            else:
                formatter.data_insert(
                    engine_tuple[0], environment_name,
                    connobj.connector_name, connobj.connector_type)
    print("")
    print(formatter.data_output(False))
    print("")
    return ret
def fileformat_list(p_engine, format, fileformat_type, fileformat_name):
    """
    Print list of file formats
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: fileformat_type: file format type
    param4: fileformat_name: file format name
    return 0 if environment found
    """
    ret = 0
    logger = logging.getLogger()
    logger.debug("fileformat type %s fileformat name %s"
                 % (fileformat_type, fileformat_name))
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    formatter = DataFormatter()
    formatter.create_header([("Engine name", 30),
                             ("File format type", 30),
                             ("File format name", 30)])
    formatter.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue
        fileformatList = DxFileFormatList()
        # resolve the name filter (all formats when no name given)
        if fileformat_name:
            by_name = fileformatList.get_all_file_format_id_by_name(
                fileformat_name)
            if by_name is None:
                ret = ret + 1
                by_name = []
        else:
            by_name = fileformatList.get_allref()
        # resolve the type filter (falls back to the name set)
        if fileformat_type:
            by_type = fileformatList.get_all_file_format_id_by_type(
                fileformat_type)
            if by_type is None:
                ret = ret + 1
                by_type = []
        else:
            by_type = by_name
        # a format must satisfy both filters to be listed
        matching = list(set(by_name) & set(by_type))
        if matching:
            for fileformatref in matching:
                fileformatobj = fileformatList.get_by_ref(fileformatref)
                formatter.data_insert(engine_tuple[0],
                                      fileformatobj.file_format_type,
                                      fileformatobj.file_format_name)
        elif fileformat_type and fileformat_name:
            ret = ret + 1
    print("")
    print(formatter.data_output(False))
    print("")
    return ret
def ruleset_listmeta(p_engine, format, rulesetname, envname, metaname):
    """
    List tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environemnt name to display metadata from
    param5: metamame: name of table/file to display
    return 0 if added, non zero for error
    """
    ret = 0
    found = False
    formatter = DataFormatter()
    formatter.create_header([
        ("Engine name", 30),
        ("Environent name", 30),
        ("Ruleset name", 30),
        ("Metadata type", 15),
        ("Metadata name", 32)
    ])
    formatter.format_type = format
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)
        if rulesetname:
            ruleset_refs = rulelist.get_all_rulesetId_by_name(rulesetname)
            if ruleset_refs is None:
                ret = ret + 1
                continue
        else:
            ruleset_refs = rulelist.get_allref()
            if ruleset_refs is None:
                continue
        metalist = DxMetaList()
        for ruleref in ruleset_refs:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                environment_name = envlist.get_by_ref(
                    connobj.environment_id).environment_name
            else:
                environment_name = 'N/A'
            metalist.LoadMeta(ruleobj.ruleset_id)
            if metaname:
                meta_refs = metalist.get_all_MetadataId_by_name(metaname, 1)
                if meta_refs is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                meta_refs = metalist.get_allref()
                if meta_refs is None:
                    continue
            for metaid in meta_refs:
                metaobj = metalist.get_by_ref(metaid)
                formatter.data_insert(
                    engine_tuple[0],
                    environment_name,
                    ruleobj.ruleset_name,
                    ruleobj.type,
                    metaobj.meta_name
                )
    print("")
    print(formatter.data_output(False))
    print("")
    if found:
        return 0
    if metaname:
        print_error("Table or file %s not found" % metaname)
    return ret
def tab_list_details(p_engine, p_format, rulesetname, envname, metaname, what):
    """
    List details of tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environemnt name to display metadata from
    param5: metamame: name of table/file to display
    param6: what - Database/File
    return 0 if added, non zero for error
    """
    ret = 0
    found = False
    data = DataFormatter()
    if what == 'Database':
        data_header = [("Engine name", 30),
                       ("Environent name", 30),
                       ("Ruleset name", 30),
                       ("Table name", 32),
                       ("Logical key", 32),
                       ("Where clause", 50),
                       ("Custom SQL", 50)]
    else:
        data_header = [("Engine name", 30),
                       ("Environent name", 30),
                       ("Ruleset name", 30),
                       ("File name", 32),
                       ("File type", 32),
                       ("File format name", 32),
                       ("Delimiter", 10),
                       ("End of record", 10)]
    data.create_header(data_header)
    data.format_type = p_format
    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)
        if rulesetname:
            rulesetref_all = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_all is None:
                ret = ret + 1
                continue
            # keep only rulesets of the requested type (Database/File)
            rulesetref_list = [
                x for x in rulesetref_all
                if rulelist.get_by_ref(x).type == what
            ]
            # bugfix: a list comprehension is never None; test for an
            # empty result instead so a type mismatch is reported via ret
            if not rulesetref_list:
                ret = ret + 1
                continue
        else:
            if what == 'Database':
                rulesetref_list = rulelist.get_all_database_rulesetIds()
            else:
                rulesetref_list = rulelist.get_all_file_rulesetIds()
            if rulesetref_list is None:
                continue
        filetypelist = DxFileFormatList()
        metalist = DxMetaList()
        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'
            metalist.LoadMeta(ruleobj.ruleset_id)
            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(
                    metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue
            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                if what == 'Database':
                    data.data_insert(engine_tuple[0],
                                     environment_name,
                                     ruleobj.ruleset_name,
                                     metaobj.meta_name,
                                     metaobj.key_column,
                                     repr(metaobj.where_clause),
                                     repr(metaobj.custom_sql))
                else:
                    if metaobj.file_format_id is not None:
                        fileformatobj = filetypelist.get_by_ref(
                            metaobj.file_format_id)
                        fileformatname = fileformatobj.file_format_name
                    else:
                        fileformatname = 'N/A'
                    data.data_insert(engine_tuple[0],
                                     environment_name,
                                     ruleobj.ruleset_name,
                                     metaobj.meta_name,
                                     metaobj.file_type,
                                     fileformatname,
                                     metaobj.delimiter,
                                     repr(metaobj.end_of_record))
    print("")
    print(data.data_output(False))
    print("")
    if found:
        return 0
    else:
        if metaname:
            print_error("Table %s not found" % metaname)
        return ret