示例#1
0
def do_check(**kwargs):
    """
    Compare a ruleset definition on the engine with an import file.

    Keyword args: ruleref, rulelist, envname, ruleset (imported dict),
    rulesetname, p_engine.
    Return 0 when the engine matches the import file, 1 otherwise.
    """

    ruleref = kwargs.get('ruleref')
    rulelist = kwargs.get('rulelist')
    envname = kwargs.get('envname')
    ruleset = kwargs.get('ruleset')
    rulesetname = kwargs.get('rulesetname')
    p_engine = kwargs.get('p_engine')

    connname = ruleset["Connector name"]

    ruleobj = rulelist.get_by_ref(ruleref)
    connobj = DxConnectorsList.get_by_ref(ruleobj.connectorId)

    if not connobj:
        # connector is gone - use placeholders so comparison below fails
        connector_name = 'N/A'
        environment_name = 'N/A'
    else:
        envobj = DxEnvironmentList.get_by_ref(connobj.environment_id)
        connector_name = connobj.connector_name
        environment_name = envobj.environment_name

    metalist = DxMetaList()
    metalist.LoadMeta(ruleobj.ruleset_id)

    # count metadata entries from the import file that exist on the engine
    rettab = 0
    for meta in ruleset["Metadata"]:
        if metalist.get_MetadataId_by_name(meta["meta_name"], 1):
            rettab = rettab + 1
        else:
            print_error("Missing meta %s" % meta["meta_name"])

    # count columns that differ between engine and import file
    retcol = 0
    for col in ruleset["Columns"]:
        dups = len([x for x in ruleset["Columns"]
                    if col["Metadata name"] == x["Metadata name"]])
        if column_check(p_engine, rulesetname, envname, col, dups) != 0:
            retcol = retcol + 1

    matching = (ruleobj.ruleset_name == rulesetname
                and connector_name == connname
                and environment_name == envname
                and retcol == 0
                and rettab == len(ruleset["Metadata"]))

    if matching:
        print_message("Ruleset definition in engine is matching import file")
        return 0

    print_error("There are difference between engine and import file")
    return 1
示例#2
0
def ruleset_deletemeta(p_engine, rulesetname, metaname, envname):
    """
    Delete a meta object (file or table) from a ruleset on each engine.

    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: metaname: name of the meta object to delete
    param4: envname: environment name
    return 0 if deleted everywhere, non 0 for error
    """

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    errors = 0

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        # skip engines we cannot open a session to
        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)

        metalist = DxMetaList()
        metalist.LoadMeta(
            ruleset_id=rulelist.get_rulesetId_by_name(rulesetname))

        metaref = metalist.get_MetadataId_by_name(metaname)

        if not metaref:
            errors = errors + 1
            continue

        if metalist.delete(metaref):
            errors = errors + 1

    return errors
示例#3
0
def do_meta_copy(engine_obj, ruleref, newref):
    """
    Copy every meta object (plus its columns) from one ruleset to another.

    param1: engine_obj: masking engine object
    param2: ruleref: source ruleset reference
    param3: newref: target ruleset reference
    return 0 if the copy finished without issues
    """
    errors = 0
    source_metas = DxMetaList()
    source_metas.LoadMeta(ruleref)

    for src_id in source_metas.get_allref():
        target_id = source_metas.copymeta(src_id, newref)
        if not target_id:
            errors = errors + 1
        else:
            # meta created - copy its column configuration as well
            errors = errors + columns_copy(engine_obj, src_id, target_id)

    return errors
示例#4
0
    def LoadColumns(self, metadata_id=None, is_masked=None):
        """
        Load column metadata into the internal column list.

        :param metadata_id: optional meta object id (table or file);
                            when None, columns of ALL meta objects are loaded
        :param is_masked: when truthy, restrict the load to masked columns
        Return None if OK, 1 on error
        """

        notable = None
        nofile = None

        metaobj = DxMetaList.get_by_ref(metadata_id)

        self.__api = ColumnMetadataApi
        self.__fileapi = FileFieldMetadataApi
        self.__apiexc = ApiException

        # Database table columns. This branch runs for table metas and also
        # when no metadata_id was given (load everything).
        if ((metadata_id is None) or (type(metaobj) == DxTable)):
            try:
                api_instance = self.__api(self.__engine.api_client)

                if is_masked:
                    if metadata_id:
                        columns = paginator(api_instance,
                                            "get_all_column_metadata",
                                            table_metadata_id=metadata_id,
                                            is_masked=is_masked)
                    else:
                        columns = paginator(api_instance,
                                            "get_all_column_metadata",
                                            is_masked=is_masked)
                else:
                    if metadata_id:
                        columns = paginator(api_instance,
                                            "get_all_column_metadata",
                                            table_metadata_id=metadata_id)
                    else:
                        columns = paginator(api_instance,
                                            "get_all_column_metadata")

                if columns.response_list:
                    for c in columns.response_list:
                        column = DxDBColumn(self.__engine)
                        column.from_column(c)
                        self.__columnList[column.cf_metadata_id] = column
                else:
                    # print_error("No column metadata found")
                    self.__logger.error("No column metadata found")

            except self.__apiexc as e:
                # a 404 for an explicit id may simply mean "not a table";
                # remember that and let the file branch below try as well
                if (e.status == 404) and (metadata_id is not None):
                    notable = 1
                else:
                    print_error(e.body)
                    self.__logger.error(e.body)
                    return 1

        # File fields. This must be a separate "if" (not "elif"): with
        # metadata_id=None both table columns AND file fields have to be
        # loaded; the previous "elif" made this branch unreachable in that
        # case and also made the combined notable/nofile check below dead.
        if ((metadata_id is None) or (type(metaobj) == DxFile)):
            try:
                api_instance = self.__fileapi(self.__engine.api_client)

                if metadata_id and (metaobj.file_format_id is None):
                    # File doesn't have a file type set so there is no masking
                    return None

                if is_masked:
                    if metadata_id:
                        fields = paginator(
                            api_instance,
                            "get_all_file_field_metadata",
                            file_format_id=metaobj.file_format_id,
                            is_masked=is_masked)
                    else:
                        fields = paginator(api_instance,
                                           "get_all_file_field_metadata",
                                           is_masked=is_masked)
                else:
                    if metadata_id:
                        fields = paginator(
                            api_instance,
                            "get_all_file_field_metadata",
                            file_format_id=metaobj.file_format_id)
                    else:
                        fields = paginator(api_instance,
                                           "get_all_file_field_metadata")

                if fields.response_list:
                    for c in fields.response_list:
                        column = DxFileField(self.__engine)
                        column.from_file(c)
                        self.__columnList[column.cf_metadata_id] = column
                else:
                    print_error("No field metadata found")
                    self.__logger.error("No field metadata found")

            except self.__apiexc as e:
                if (e.status == 404) and (metadata_id is not None):
                    nofile = 1
                else:
                    print_error(e.body)
                    self.__logger.error(e.body)
                    return 1

        # an explicit id that matched neither a table nor a file is an error
        if nofile and notable:
            print_error("Columns for meta id not found")
            return 1
        else:
            return None
示例#5
0
def do_export(**kwargs):
    """
    Export ruleset into external object

    Keyword args:
        ruleref: reference of the ruleset to export
        rulelist: loaded ruleset list used to resolve ruleref
        exportout: output list; the exported ruleset dict is appended to it
        exportmeta: 'Y' to also export metadata (tables/files) and columns
        metaname: optional name of a single meta object to export
        engine_obj: masking engine object (used for column export)
    Return 0 if exported, 1 on error
    """

    ruleref = kwargs.get('ruleref')
    rulelist = kwargs.get('rulelist')
    exportout = kwargs.get('exportout')
    exportmeta = kwargs.get('exportmeta')
    metaname = kwargs.get('metaname')
    engine_obj = kwargs.get('engine_obj')
    envlist = DxEnvironmentList

    ruleobj = rulelist.get_by_ref(ruleref)
    connobj = DxConnectorsList.get_by_ref(ruleobj.connectorId)

    logger = logging.getLogger()

    ret = 0

    if connobj:
        envobj = envlist.get_by_ref(connobj.environment_id)
        connector_name = connobj.connector_name
        environment_name = envobj.environment_name
    else:
        # connector not found - export placeholders instead
        connector_name = 'N/A'
        environment_name = None

    # NOTE: "Environent" spelling matches the key/header spelling used
    # elsewhere in this tool - do not correct it here in isolation
    ruleset = {
        "Ruleset name": ruleobj.ruleset_name,
        "Connector name": connector_name,
        "Environent name": environment_name}

    if exportmeta == 'Y':
        metadatalist = []
        metalist = DxMetaList()
        metalist.LoadMeta(ruleobj.ruleset_id)

        if metaname:
            # export a single named meta object only
            metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
            if metalist_ref is None:
                logger.error("no meta %s found" % metaname)
                return 1
        else:
            # export all meta objects of the ruleset
            metalist_ref = metalist.get_allref()
            if metalist_ref is None:
                logger.error("no meta data found")
                return 1

        for metaid in metalist_ref:
            metaobj = metalist.get_by_ref(metaid)

            # NOTE(review): connobj may be None here (handled above with
            # 'N/A' placeholders) - confirm callers guarantee a connector
            # exists whenever exportmeta == 'Y'
            if connobj.is_database:
                tabhash = {
                  "table": True,
                  "meta_name": metaobj.meta_name,
                  "key_column": metaobj.key_column,
                  "where_clause": repr(metaobj.where_clause),
                  "custom_sql": repr(metaobj.custom_sql)
                }
            else:
                if metaobj.file_format_id is not None:
                    filetypelist = DxFileFormatList()
                    fileformatobj = filetypelist.get_by_ref(
                        metaobj.file_format_id)
                    fileformatname = fileformatobj.file_format_name
                else:
                    # file has no format assigned
                    fileformatname = 'N/A'

                tabhash = {
                  "table": False,
                  "meta_name": metaobj.meta_name,
                  "file_format": fileformatname,
                  "file_delimiter": metaobj.delimiter,
                  "file_eor": metaobj.end_of_record,
                  "file_enclosure": metaobj.enclosure,
                  "file_name_regex": metaobj.name_is_regular_expression
                }

            metadatalist.append(tabhash)

        ruleset["Metadata"] = metadatalist
        # column definitions come from column_export and are re-parsed
        # from its JSON output
        columndata = column_export(
                        engine_obj.get_name(), None, ruleobj.ruleset_name,
                        environment_name, metaname, None, None)
        ruleset["Columns"] = json.loads(columndata.data_output(False))
    else:
        ruleset["Metadata"] = []
        ruleset["Columns"] = []

    exportout.append(ruleset)
    return ret
示例#6
0
def ruleset_listmeta(p_engine, format, rulesetname, envname, metaname):
    """
    Print the tables/files of matching rulesets across engines.

    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    return 0 if listed, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()
    data.create_header([
        ("Engine name", 30),
        ("Environent name", 30),
        ("Ruleset name", 30),
        ("Metadata type", 15),
        ("Metadata name", 32)
    ])
    data.format_type = format

    enginelist = get_list_of_engines(p_engine)
    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        # skip engines we cannot open a session to
        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_list = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            rulesetref_list = rulelist.get_allref()
            if rulesetref_list is None:
                continue

        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                environment_name = envlist.get_by_ref(
                    connobj.environment_id).environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                data.data_insert(engine_tuple[0],
                                 environment_name,
                                 ruleobj.ruleset_name,
                                 ruleobj.type,
                                 metaobj.meta_name)

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    if metaname:
        print_error("Table or file %s not found" % metaname)
    return ret
示例#7
0
def tab_list_details(p_engine, p_format, rulesetname, envname, metaname, what):
    """
    List details of tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    param6: what - Database/File (selects header layout and ruleset filter)
    return 0 if listed, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()

    # header layout depends on the ruleset type being listed
    if what == 'Database':
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("Table name", 32),
                       ("Logical key", 32), ("Where clause", 50),
                       ("Custom SQL", 50)]
    else:
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("File name", 32),
                       ("File type", 32), ("File format name", 32),
                       ("Delimiter", 10), ("End of record", 10)]

    data.create_header(data_header)

    data.format_type = p_format

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        # skip engines we cannot open a session to
        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_all = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_all is None:
                ret = ret + 1
                continue
            # keep only rulesets of the requested type (Database/File)
            rulesetref_list = [
                x for x in rulesetref_all
                if rulelist.get_by_ref(x).type == what
            ]
            # NOTE(review): a list comprehension never yields None, so this
            # check is dead code - an empty list simply skips the loop below
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            if what == 'Database':
                rulesetref_list = rulelist.get_all_database_rulesetIds()
                if rulesetref_list is None:
                    continue
            else:
                rulesetref_list = rulelist.get_all_file_rulesetIds()
                if rulesetref_list is None:
                    continue

        filetypelist = DxFileFormatList()
        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                # connector no longer exists - print placeholder
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                if what == 'Database':
                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.key_column,
                                     repr(metaobj.where_clause),
                                     repr(metaobj.custom_sql))
                else:
                    if metaobj.file_format_id is not None:
                        fileformatobj = filetypelist.get_by_ref(
                            metaobj.file_format_id)
                        fileformatname = fileformatobj.file_format_name
                    else:
                        # file has no format assigned
                        fileformatname = 'N/A'

                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.file_type, fileformatname,
                                     metaobj.delimiter,
                                     repr(metaobj.end_of_record))

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table %s not found" % metaname)
        return ret
示例#8
0
def tab_selector(p_engine, rulesetname, envname, metaname, function_to_call,
                 params):
    """
    Update properties of a table/file meta object inside a ruleset.
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name containing the meta object
    param3: envname: environment name
    param4: metaname: name of table/file to update
    param5: function_to_call: name of function to call
    param6: params: dict of new property values keyed by CLI option name
    return 0 if updated, non zero for error
    """

    # NOTE(review): function_to_call is never referenced in this body -
    # confirm whether it is vestigial or an unimplemented dispatch hook

    ret = 0
    update = False

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        # skip engines we cannot open a session to
        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        else:
            ruleref = None

        metalist = DxMetaList()
        metalist.LoadMeta(ruleset_id=ruleref)

        metaref = metalist.get_MetadataId_by_name(metaname)

        if metaref:
            metaobj = metalist.get_by_ref(metaref)
        else:
            ret = ret + 1
            continue

        # maps CLI option names (params keys) to meta object attributes
        param_map = {
            "custom_sql": "custom_sql",
            "where_clause": "where_clause",
            "having_clause": "having_clause",
            "key_column": "key_column",
            "file_format": "file_format_id",
            "file_delimiter": "delimiter",
            "file_eor": "end_of_record",
            "file_enclosure": "enclosure",
            "file_name_regex": "name_is_regular_expression"
        }

        # 'custom' end-of-record requires an explicit custom value
        eor = params["file_eor"]
        if eor == 'custom':
            if params["file_eor_custom"]:
                params["file_eor"] = params["file_eor_custom"]
            else:
                print_error("Custom End of record is unknown")
                return 1

        for p in param_map.keys():
            if params[p]:
                if hasattr(metaobj, param_map[p]):
                    update = True
                    value = params[p]
                    # NOTE(review): dead branch - '' is falsy, so the outer
                    # "if params[p]" already filters empty strings; confirm
                    # whether clearing an attribute with '' was intended
                    if value == '':
                        value = None
                    setattr(metaobj, param_map[p], value)

        # push changes only when at least one attribute was modified
        if update:
            ret = ret + metaobj.update()

    return ret
示例#9
0
def column_batch(p_engine, rulesetname, envname, inputfile, inventory):
    """
    Update all columns defined in file
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: envname: environment name
    param4: inputfile: file handler with entries
    param5: inventory: True when the input is a GUI inventory export
                       (two extra leading columns, GUI algorithm names)
    return 0 if all rows processed without issues
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    if inventory is True:
        # inventory files use GUI algorithm names - map them to API names
        mapping = algname_mapping_import()
    else:
        mapping = None

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        # skip engines we cannot open a session to
        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        metalist = DxMetaList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)

        # cache of loaded DxColumnList objects, keyed by meta reference,
        # so each table/file is fetched from the engine only once
        metacolumn_list = {}

        for line in inputfile:
            # skip comment lines
            if line.startswith('#'):
                continue
            try:
                logger.debug("readling line %s" % line)
                if inventory is False:
                    if ruleobj.type == "Database":
                        (metaname, column_role, parent_column, column_name,
                         coltype, domain_name, algname,
                         is_masked_YN, idmethod, rowtype, dateformat) \
                         = line.strip().split(',')
                    else:
                        (metaname, column_name, domain_name, algname,
                         is_masked_YN, priority, recordtype, position, length,
                         dateformat) = line.strip().split(',')
                else:
                    # inventory exports carry env and ruleset columns first
                    if ruleobj.type == "Database":
                        (env, ruleset, metaname, column_role, parent_column,
                         column_name, coltype, domain_name, algname,
                         is_masked_YN, idmethod, rowtype, dateformat) \
                         = line.strip().split(',')
                    else:
                        (env, ruleset, metaname, column_name, domain_name,
                         algname, is_masked_YN, priority, recordtype, position,
                         length, dateformat) = line.strip().split(',')
            except ValueError as e:
                # Python 3 appends "(expected N)" to the unpack message, so
                # match on the prefix; exact equality never fired there
                if str(e).startswith("too many values to unpack"):
                    logger.error("to few values in inputfile - maybe add "
                                 "--inventory if you are loading an inventory"
                                 "file from GUI")
                    print_error("to few values in inputfile - maybe add "
                                "--inventory if you are loading an inventory"
                                "file from GUI")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break
                else:
                    logger.error("not all columns in file have value")
                    print_error("not all columns in file have value")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break

            metaref = metalist.get_MetadataId_by_name(metaname)
            if metaref is None:
                ret = ret + 1
                continue

            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s " % metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                column_name)

            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                # both 'Y' (CLI export) and 'true' (inventory) mean masked
                if is_masked_YN == 'Y' or is_masked_YN == 'true':
                    is_masked = True
                else:
                    is_masked = False

                # '-' and '' are placeholders for "no algorithm / domain"
                if algname == '' or algname == '-':
                    algname = 'None'

                if domain_name == '' or domain_name == '-':
                    domain_name = 'None'

                if ruleobj.type == "Database":
                    if idmethod == 'Auto':
                        colobj.is_profiler_writable = True
                    elif idmethod == 'User':
                        colobj.is_profiler_writable = False
                    else:
                        print_error("Wrong id method")
                        return 1

                if dateformat == '-':
                    colobj.date_format = None
                else:
                    colobj.date_format = dateformat

                if mapping is not None and algname != 'None':
                    try:
                        algname = mapping[algname]
                    except KeyError as e:
                        logger.debug("Wrong algoritm name in input file"
                                     ". Not an inventory file ?")
                        logger.debug(str(e))
                        print_error("Wrong algoritm name in input file"
                                    ". Not an inventory file ?")
                        return 1

                ret = ret + update_algorithm(colobj=colobj,
                                             algname=algname,
                                             domainname=domain_name,
                                             metaobj=metaobj,
                                             ruleobj=ruleobj,
                                             is_masked=is_masked)
            else:
                ret = ret + 1
                continue

    return ret
示例#10
0
def column_worker(p_engine,
                  sortby,
                  rulesetname,
                  envname,
                  metaname,
                  columnname,
                  filter_algname,
                  filter_is_masked,
                  algname,
                  is_masked,
                  domainname,
                  function_to_call,
                  data=None,
                  inventory=None,
                  **kwargs):
    """
    Select a column using all filter parameters
    and run action defined in function_to_call

    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: filter_algname: algorithm name to filter
    param8: filter_is_masked: is masked filter
    param9: algname: new algorithm to set
    param10: is_masked: set masking False/True
    param11: domainname: new domain to set
    param12: function_to_call: name of a module-level function to call
             for each selected column (resolved via globals())
    param13: data: output object
    param14: inventory: passed through to the called function
    kwargs: passed through; "colcount" (list) is extended with all
            column refs of each processed meta object when supplied
    return 0 action is processed without issues
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        # skip engines we cannot open a session to
        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        connlist = DxConnectorsList(envname)
        metalist = DxMetaList()

        rulesetref_list = []

        # a named ruleset narrows the scope; otherwise process all rulesets
        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
            if ruleref:
                rulesetref_list.append(ruleref)
        else:
            rulesetref_list = rulelist.get_allref()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)

            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
            else:
                # connector missing - called function receives envobj=None
                envobj = None

            metalist.LoadMeta(ruleobj.ruleset_id)

            metasetref_list = []

            if metaname:
                metaref = metalist.get_MetadataId_by_name(metaname, 1)
                if metaref:
                    metasetref_list.append(metaref)
            else:
                metasetref_list = metalist.get_allref()

            for metaid in metasetref_list:
                metaobj = metalist.get_by_ref(metaid)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaid,
                                    is_masked=filter_is_masked)

                colsetref_list = []

                # optional caller-supplied accumulator of all column refs
                colcount = kwargs.get("colcount")
                if colcount is not None:
                    colcount.extend(collist.get_allref())

                if columnname:
                    colref = collist.get_column_id_by_name(columnname)
                    logger.debug("Column ref with name %s : %s" %
                                 (columnname, colref))
                    if colref:
                        colsetref_list.append(colref)
                else:
                    colsetref_list = collist.get_allref()

                logger.debug("List of columns to process : %s" %
                             colsetref_list)

                # intersect name selection with the algorithm filter
                if filter_algname:
                    colsetref_masked = collist.get_column_id_by_algorithm(
                        filter_algname)
                    logger.debug("List of columns with algorithm %s : %s" %
                                 (filter_algname, colsetref_masked))
                    colsetref_list = list(
                        set(colsetref_list)
                        & set(colsetref_masked))
                    logger.debug("Intersection with column name filter %s" %
                                 colsetref_masked)

                for colref in colsetref_list:
                    colobj = collist.get_by_ref(colref)

                    # resolve the worker function by name at module level
                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(data=data,
                                        engine=engine_tuple,
                                        envobj=envobj,
                                        ruleobj=ruleobj,
                                        metaobj=metaobj,
                                        colobj=colobj,
                                        algname=algname,
                                        is_masked=is_masked,
                                        domainname=domainname,
                                        inventory=inventory,
                                        **kwargs)
    return ret
示例#11
0
def column_batch(p_engine, p_username, rulesetname, envname, inputfile, inventory):
    """
    Update all columns defined in a CSV input file on the Masking engine(s).
    param1: p_engine: engine name from configuration
    param2: p_username: user name from configuration
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: inputfile: file handler with entries
    param6: inventory: True when the file is an inventory export from the GUI
    return 0 if all rows processed without issues, non-zero otherwise
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    # GUI inventory exports use display names for algorithms;
    # map them back to API names before updating
    if inventory is True:
        mapping = algname_mapping_import()
    else:
        mapping = None

    # expected CSV field layouts for the two ruleset types
    database_list = ("metaname column_role parent_column column_name "
                     "type domain_name algname is_masked_YN idmethod "
                     "rowtype dateformat")

    file_list = ("metaname column_name domain_name algname "
                 "is_masked_YN priority recordtype position "
                 "length dateformat")

    inventory_addition = "env ruleset"

    multicolumn_addition = "fieldid groupid"

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        metalist = DxMetaList()

        alg_list = DxAlgorithmList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)

        # cache of DxColumnList objects keyed by metadata id so each
        # table/file is loaded from the engine only once
        metacolumn_list = {}

        # namedtuple type describing one CSV line; built lazily from the
        # first line because the layout depends on the file contents
        linetype = None

        for line in inputfile:
            if linetype is None:
                if ruleobj.type == "Database":
                    column_spec = database_list
                else:
                    column_spec = file_list

                if inventory is True:
                    column_spec = inventory_addition + " " + column_spec

                if "Multi-Column" in line:
                    # we have a 6.0.8 or higher inventory with extra
                    # multi-column algorithm fields
                    if inventory is True:
                        column_spec = column_spec + " notes " \
                                      + multicolumn_addition
                    else:
                        column_spec = column_spec + " " \
                                      + multicolumn_addition

                linetype = namedtuple("linetype", column_spec)

            if line.startswith('#'):
                continue
            try:
                logger.debug("reading line %s" % line)
                lineobj = linetype(*line.strip().split(','))

            except (TypeError, ValueError) as e:
                # An arity mismatch when calling the namedtuple raises
                # TypeError ("... takes N positional arguments but M were
                # given" / "missing N required positional arguments"), not
                # the ValueError of tuple unpacking; and the Python 3
                # unpack message carries an "(expected N)" suffix, so an
                # exact string comparison would never match. Test by
                # substring for the "too many fields" case.
                if "too many values" in str(e) or "were given" in str(e):
                    errmsg = ("too many values in inputfile - maybe add "
                              "--inventory if you are loading an inventory "
                              "file from GUI")
                else:
                    errmsg = "not all columns in file have value"
                logger.error(errmsg)
                print_error(errmsg)
                logger.error("line %s" % line)
                print_error("line %s" % line)
                ret = ret + 1
                break

            metaref = metalist.get_MetadataId_by_name(lineobj.metaname)
            if metaref is None:
                ret = ret + 1
                continue

            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s "
                             % lineobj.metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                lineobj.column_name)

            if not colref:
                # column listed in the file does not exist in the engine
                ret = ret + 1
                continue

            colobj = metacolumn_list[metaref].get_by_ref(colref)

            # GUI inventory exports use 'true'; CLI exports use 'Y'
            is_masked = lineobj.is_masked_YN in ('Y', 'true')

            if lineobj.algname in ('', '-'):
                algname = 'None'
            else:
                algname = lineobj.algname

            if lineobj.domain_name in ('', '-'):
                domain_name = 'None'
            else:
                domain_name = lineobj.domain_name

            # multi-column algorithms (6.0.8+) carry a field name
            if hasattr(lineobj, 'fieldid') \
               and lineobj.fieldid not in ('', '-'):
                fieldid = lineobj.fieldid
            else:
                fieldid = None

            if ruleobj.type == "Database":
                if lineobj.idmethod == 'Auto':
                    colobj.is_profiler_writable = True
                elif lineobj.idmethod == 'User':
                    colobj.is_profiler_writable = False
                else:
                    print_error("Wrong id method")
                    return 1

            if lineobj.dateformat == '-':
                colobj.date_format = None
            else:
                colobj.date_format = lineobj.dateformat

            if fieldid is not None:
                # translate the field name from the file into the engine's
                # numeric field id for this multi-column algorithm
                algobj = alg_list.get_by_ref(lineobj.algname)
                field_id = [x.field_id for x in algobj.fields
                            if x.name == fieldid][0]
                group_id = lineobj.groupid
            else:
                field_id = None
                group_id = None

            if mapping is not None and algname != 'None' \
               and algname in mapping:
                logger.debug("changing a name of algorithm for inventory "
                             "import: from {} to {}".format(
                                 algname, mapping[algname]))
                algname = mapping[algname]

            ret = ret + update_algorithm(colobj=colobj,
                                         algname=algname,
                                         domainname=domain_name,
                                         metaobj=metaobj,
                                         ruleobj=ruleobj,
                                         is_masked=is_masked,
                                         algorithm_field_id=field_id,
                                         algorithm_group_no=group_id)

    return ret
示例#12
0
def column_batch(p_engine, rulesetname, envname, inputfile):
    """
    Update all columns defined in a CSV input file on the Masking engine(s).
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: envname: environment name
    param4: inputfile: file handler with entries
    return 0 if all rows processed without issues, non-zero otherwise
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        metalist = DxMetaList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)

        # cache of DxColumnList objects keyed by metadata id so each
        # table/file is loaded from the engine only once
        metacolumn_list = {}

        for line in inputfile:
            if line.startswith('#'):
                continue
            try:
                logger.debug("reading line %s" % line)
                if ruleobj.type == "Database":
                    # 'coltype' avoids shadowing the builtin 'type';
                    # column_role / parent_column / coltype are unused
                    (metaname, column_role, parent_column, column_name,
                     coltype, domain_name, algname,
                     is_masked_YN) = line.strip().split(',')
                else:
                    (metaname, column_name, domain_name, algname,
                     is_masked_YN) = line.strip().split(',')

            except ValueError:
                logger.error("not all columns in file have value")
                print_error("not all columns in file have value")
                logger.error("line %s" % line)
                print_error("line %s" % line)
                ret = ret + 1
                continue

            metaref = metalist.get_MetadataId_by_name(metaname)
            if metaref is None:
                ret = ret + 1
                continue

            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s " % metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                column_name)

            if not colref:
                # column listed in the file does not exist in the engine
                ret = ret + 1
                continue

            colobj = metacolumn_list[metaref].get_by_ref(colref)
            is_masked = (is_masked_YN == 'Y')

            if algname == '':
                algname = 'None'

            if domain_name == '':
                domain_name = 'None'

            # count update failures so the return code honors the
            # documented contract (0 only when every row succeeded)
            ret = ret + update_algorithm(colobj=colobj,
                                         algname=algname,
                                         domainname=domain_name,
                                         metaobj=metaobj,
                                         ruleobj=ruleobj,
                                         is_masked=is_masked)

    return ret