def columns_copy(engine_obj, meta_id, new_meta_id):
    """
    Copy column settings (masking setup) from one meta object to another.
    :param1 engine_obj: Masking engine (kept for interface compatibility;
                        not referenced directly in this function)
    :param2 meta_id: source meta id
    :param3 new_meta_id: destination meta id
    return 1 if the source or destination columns can't be loaded,
    otherwise the number of columns that failed to update (0 if OK)
    """
    ret = 0
    logger = logging.getLogger()

    # load only the masked columns of the source meta - unmasked ones
    # carry nothing worth copying
    collist = DxColumnList()
    if collist.LoadColumns(metadata_id=meta_id, is_masked=True) == 1:
        logger.debug("Problem with loading masked columns for meta %s",
                     meta_id)
        return 1

    newcollist = DxColumnList()
    if newcollist.LoadColumns(metadata_id=new_meta_id) == 1:
        logger.debug("Problem with loading columns for new meta %s",
                     new_meta_id)
        return 1

    for colref in collist.get_allref():
        colobj = collist.get_by_ref(colref)
        # match source to destination column by name
        newcolref = newcollist.get_column_id_by_name(colobj.cf_meta_name)
        newcol = newcollist.get_by_ref(newcolref)
        # isinstance instead of type() comparison - idiomatic and
        # subclass-aware type check
        if isinstance(newcol, DxDBColumn):
            newcol.from_column(colobj)
            newcol.table_metadata_id = new_meta_id
            newcol.column_metadata_id = newcolref
        else:
            # non-database ruleset: file field column
            newcol.from_file(colobj)
            newcol.file_field_metadata_id = newcolref
            newcol.file_format_id = new_meta_id
        # update() returns non-zero on failure; count failures
        if newcol.update():
            ret = ret + 1
    return ret
def column_batch(p_engine, rulesetname, envname, inputfile, inventory):
    """
    Update all columns defined in a CSV input file.
    :param1 p_engine: engine name from configuration
    :param2 rulesetname: ruleset name
    :param3 envname: environment name
    :param4 inputfile: file handler with entries
    :param5 inventory: True if the file is an inventory export from the GUI
                       (extra leading env/ruleset columns, GUI algorithm names)
    return 0 if all rows processed without issues
    """
    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    # GUI inventory exports use display algorithm names; build a mapping
    # back to API names when loading an inventory file
    if inventory is True:
        mapping = algname_mapping_import()
    else:
        mapping = None

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        metalist = DxMetaList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)
        # cache of DxColumnList keyed by metadata id, so each table/file
        # is loaded from the engine only once
        metacolumn_list = {}

        for line in inputfile:
            if line.startswith('#'):
                continue
            try:
                logger.debug("reading line %s", line)
                if inventory is False:
                    if ruleobj.type == "Database":
                        (metaname, column_role, parent_column, column_name,
                         coltype, domain_name, algname, is_masked_YN,
                         idmethod, rowtype, dateformat) \
                            = line.strip().split(',')
                    else:
                        (metaname, column_name, domain_name, algname,
                         is_masked_YN, priority, recordtype, position,
                         length, dateformat) = line.strip().split(',')
                else:
                    # inventory files carry two extra leading columns
                    if ruleobj.type == "Database":
                        (env, ruleset, metaname, column_role, parent_column,
                         column_name, coltype, domain_name, algname,
                         is_masked_YN, idmethod, rowtype, dateformat) \
                            = line.strip().split(',')
                    else:
                        (env, ruleset, metaname, column_name, domain_name,
                         algname, is_masked_YN, priority, recordtype,
                         position, length, dateformat) \
                            = line.strip().split(',')
            except ValueError as e:
                # Python 3 appends "(expected N)" to the unpack message,
                # so use a substring match rather than equality
                if "too many values to unpack" in str(e):
                    logger.error("too many values in inputfile - maybe add "
                                 "--inventory if you are loading an "
                                 "inventory file from GUI")
                    print_error("too many values in inputfile - maybe add "
                                "--inventory if you are loading an "
                                "inventory file from GUI")
                    logger.error("line %s", line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break
                else:
                    logger.error("not all columns in file have value")
                    print_error("not all columns in file have value")
                    logger.error("line %s", line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break

            metaref = metalist.get_MetadataId_by_name(metaname)
            if metaref is None:
                ret = ret + 1
                continue
            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s ", metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                column_name)
            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                # GUI inventory uses 'true'; dxmc export uses 'Y'
                is_masked = is_masked_YN in ('Y', 'true')
                if algname in ('', '-'):
                    algname = 'None'
                if domain_name in ('', '-'):
                    domain_name = 'None'
                if ruleobj.type == "Database":
                    if idmethod == 'Auto':
                        colobj.is_profiler_writable = True
                    elif idmethod == 'User':
                        colobj.is_profiler_writable = False
                    else:
                        print_error("Wrong id method")
                        return 1
                if dateformat == '-':
                    colobj.date_format = None
                else:
                    colobj.date_format = dateformat
                # translate GUI algorithm name to API name for inventory
                if mapping is not None and algname != 'None':
                    try:
                        algname = mapping[algname]
                    except KeyError as e:
                        logger.debug("Wrong algorithm name in input file"
                                     ". Not an inventory file ?")
                        logger.debug(str(e))
                        print_error("Wrong algorithm name in input file"
                                    ". Not an inventory file ?")
                        return 1
                ret = ret + update_algorithm(colobj=colobj,
                                             algname=algname,
                                             domainname=domain_name,
                                             metaobj=metaobj,
                                             ruleobj=ruleobj,
                                             is_masked=is_masked)
            else:
                # column from the file not found in the ruleset meta
                ret = ret + 1
                continue
    return ret
def column_worker(p_engine, sortby, rulesetname, envname, metaname,
                  columnname, filter_algname, filter_is_masked, algname,
                  is_masked, domainname, function_to_call, data=None,
                  inventory=None, **kwargs):
    """
    Select columns using all filter parameters and run the action defined
    in function_to_call on each of them.
    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: filter_algname: algorithm name to filter
    param8: filter_is_masked: is masked filter
    param9: algname: new algorithm to set
    param10: is_masked: set masking False/True
    param11: domainname: new domain to set
    param12: function_to_call: name of a module-level function to call
    param13: data: output object
    param14: inventory: passed through to the called function
    kwargs: forwarded to the called function; "colcount" (a list) is also
            extended with every column ref seen, for callers that count
    return 0 if action is processed without issues
    """
    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    # resolve the worker function once (loop-invariant); a wrong name
    # fails fast here instead of after engine sessions are opened
    dynfunc = globals()[function_to_call]

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        connlist = DxConnectorsList(envname)
        metalist = DxMetaList()

        # narrow to a single ruleset when a name is given
        rulesetref_list = []
        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
            if ruleref:
                rulesetref_list.append(ruleref)
        else:
            rulesetref_list = rulelist.get_allref()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            # environment is resolved via the ruleset's connector
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
            else:
                envobj = None
            metalist.LoadMeta(ruleobj.ruleset_id)

            # narrow to a single meta (table/file) when a name is given
            metasetref_list = []
            if metaname:
                metaref = metalist.get_MetadataId_by_name(metaname, 1)
                if metaref:
                    metasetref_list.append(metaref)
            else:
                metasetref_list = metalist.get_allref()

            for metaid in metasetref_list:
                metaobj = metalist.get_by_ref(metaid)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaid,
                                    is_masked=filter_is_masked)

                colsetref_list = []
                # optional out-parameter: report every column ref seen
                colcount = kwargs.get("colcount")
                if colcount is not None:
                    colcount.extend(collist.get_allref())

                if columnname:
                    colref = collist.get_column_id_by_name(columnname)
                    logger.debug("Column ref with name %s : %s",
                                 columnname, colref)
                    if colref:
                        colsetref_list.append(colref)
                else:
                    colsetref_list = collist.get_allref()

                logger.debug("List of columns to process : %s",
                             colsetref_list)

                if filter_algname:
                    colsetref_masked = collist.get_column_id_by_algorithm(
                        filter_algname)
                    logger.debug("List of columns with algorithm %s : %s",
                                 filter_algname, colsetref_masked)
                    # intersect the name filter with the algorithm filter
                    colsetref_list = list(
                        set(colsetref_list) & set(colsetref_masked))
                    logger.debug("Intersection with column name filter %s",
                                 colsetref_masked)

                for colref in colsetref_list:
                    colobj = collist.get_by_ref(colref)
                    ret = ret + dynfunc(data=data,
                                        engine=engine_tuple,
                                        envobj=envobj,
                                        ruleobj=ruleobj,
                                        metaobj=metaobj,
                                        colobj=colobj,
                                        algname=algname,
                                        is_masked=is_masked,
                                        domainname=domainname,
                                        inventory=inventory,
                                        **kwargs)
    return ret
def column_batch(p_engine, p_username, rulesetname, envname, inputfile,
                 inventory):
    """
    Update all columns defined in a CSV input file.
    :param1 p_engine: engine name from configuration
    :param2 p_username: user name from configuration
    :param3 rulesetname: ruleset name
    :param4 envname: environment name
    :param5 inputfile: file handler with entries
    :param6 inventory: True if the file is an inventory export from the GUI
                       (extra leading env/ruleset columns, GUI algorithm names)
    return 0 if all rows processed without issues
    """
    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)
    if enginelist is None:
        return 1

    # GUI inventory exports use display algorithm names; build a mapping
    # back to API names when loading an inventory file
    if inventory is True:
        mapping = algname_mapping_import()
    else:
        mapping = None

    # field layouts of the supported CSV formats (namedtuple field names)
    database_list = ("metaname column_role parent_column column_name "
                     "type domain_name algname is_masked_YN idmethod "
                     "rowtype dateformat")
    file_list = ("metaname column_name domain_name algname "
                 "is_masked_YN priority recordtype position "
                 "length dateformat")
    inventory_addition = "env ruleset"
    multicolumn_addition = "fieldid groupid"

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        metalist = DxMetaList()
        alg_list = DxAlgorithmList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)
        # cache of DxColumnList keyed by metadata id, so each table/file
        # is loaded from the engine only once
        metacolumn_list = {}
        setversion = False

        for line in inputfile:
            if not setversion:
                # detect the file layout from the first line read
                setversion = True
                if ruleobj.type == "Database":
                    fieldnames = database_list
                else:
                    fieldnames = file_list
                if inventory is True:
                    fieldnames = inventory_addition + " " + fieldnames
                if "Multi-Column" in line:
                    # 6.0.8 or higher inventory with multi-column
                    # algorithm fields
                    if inventory is True:
                        fieldnames = (fieldnames + " notes "
                                      + multicolumn_addition)
                    else:
                        fieldnames = fieldnames + " " + multicolumn_addition
                linetype = namedtuple("linetype", fieldnames)

            if line.startswith('#'):
                continue

            values = line.strip().split(',')
            try:
                logger.debug("reading line %s", line)
                lineobj = linetype(*values)
            except (TypeError, ValueError):
                # a wrong argument count for namedtuple raises TypeError,
                # not ValueError; decide the direction by field count
                if len(values) > len(linetype._fields):
                    logger.error("too many values in inputfile - maybe add "
                                 "--inventory if you are loading an "
                                 "inventory file from GUI")
                    print_error("too many values in inputfile - maybe add "
                                "--inventory if you are loading an "
                                "inventory file from GUI")
                else:
                    logger.error("not all columns in file have value")
                    print_error("not all columns in file have value")
                logger.error("line %s", line)
                print_error("line %s" % line)
                ret = ret + 1
                break

            metaref = metalist.get_MetadataId_by_name(lineobj.metaname)
            if metaref is None:
                ret = ret + 1
                continue
            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s ",
                             lineobj.metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                lineobj.column_name)
            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                # GUI inventory uses 'true'; dxmc export uses 'Y'
                is_masked = lineobj.is_masked_YN in ('Y', 'true')
                if lineobj.algname in ('', '-'):
                    algname = 'None'
                else:
                    algname = lineobj.algname
                if lineobj.domain_name in ('', '-'):
                    domain_name = 'None'
                else:
                    domain_name = lineobj.domain_name
                # fieldid only exists in multi-column layouts
                if hasattr(lineobj, 'fieldid'):
                    if lineobj.fieldid in ('', '-'):
                        fieldid = None
                    else:
                        fieldid = lineobj.fieldid
                else:
                    fieldid = None
                if ruleobj.type == "Database":
                    if lineobj.idmethod == 'Auto':
                        colobj.is_profiler_writable = True
                    elif lineobj.idmethod == 'User':
                        colobj.is_profiler_writable = False
                    else:
                        print_error("Wrong id method")
                        return 1
                    if lineobj.dateformat == '-':
                        colobj.date_format = None
                    else:
                        colobj.date_format = lineobj.dateformat
                if fieldid is not None:
                    # resolve the multi-column algorithm field name to its
                    # numeric id on the algorithm object
                    algobj = alg_list.get_by_ref(lineobj.algname)
                    field_id = [x.field_id for x in algobj.fields
                                if x.name == fieldid][0]
                    group_id = lineobj.groupid
                else:
                    field_id = None
                    group_id = None
                # translate GUI algorithm name to API name for inventory
                if mapping is not None and algname != 'None' \
                        and algname in mapping:
                    logger.debug("changing a name of algorithm for "
                                 "inventory import: from {} to {}".format(
                                     algname, mapping[algname]))
                    algname = mapping[algname]
                ret = ret + update_algorithm(colobj=colobj,
                                             algname=algname,
                                             domainname=domain_name,
                                             metaobj=metaobj,
                                             ruleobj=ruleobj,
                                             is_masked=is_masked,
                                             algorithm_field_id=field_id,
                                             algorithm_group_no=group_id)
            else:
                # column from the file not found in the ruleset meta
                ret = ret + 1
                continue
    return ret
def column_batch(p_engine, rulesetname, envname, inputfile):
    """
    Update all columns defined in a CSV input file.
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: envname: environment name
    param4: inputfile: file handler with entries
    return 0 if all rows processed without issues
    """
    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        metalist = DxMetaList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)
        # cache of DxColumnList keyed by metadata id, so each table/file
        # is loaded from the engine only once
        metacolumn_list = {}

        for line in inputfile:
            if line.startswith('#'):
                continue
            try:
                logger.debug("reading line %s", line)
                if ruleobj.type == "Database":
                    (metaname, column_role, parent_column, column_name,
                     coltype, domain_name, algname, is_masked_YN) \
                        = line.strip().split(',')
                else:
                    (metaname, column_name, domain_name, algname,
                     is_masked_YN) = line.strip().split(',')
            except ValueError:
                logger.error("not all columns in file have value")
                print_error("not all columns in file have value")
                logger.error("line %s", line)
                print_error("line %s" % line)
                ret = ret + 1
                continue

            metaref = metalist.get_MetadataId_by_name(metaname)
            if metaref is None:
                ret = ret + 1
                continue
            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s ", metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                column_name)
            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                is_masked = (is_masked_YN == 'Y')
                if algname == '':
                    algname = 'None'
                if domain_name == '':
                    domain_name = 'None'
                # accumulate the update result so a failed update is
                # reflected in the return value (it was silently dropped
                # before, contradicting the documented contract)
                ret = ret + update_algorithm(colobj=colobj,
                                             algname=algname,
                                             domainname=domain_name,
                                             metaobj=metaobj,
                                             ruleobj=ruleobj,
                                             is_masked=is_masked)
            else:
                # column from the file not found in the ruleset meta
                ret = ret + 1
                continue
    return ret