Example no. 1
def ruleset_addmeta(p_engine, p_username, params, inputfile, fromconnector,
                    bulk):
    """
    Add metadata to the Masking engine
    param1: p_engine: engine name from configuration
    param2: p_username: username for the engine
    param3: params: set of required parameters to add meta
    param4: inputfile: file with table/file definition
    param5: fromconnector: if set, add meta objects fetched from the connector
    param6: bulk: if True, use bulk operations to add meta objects
    return 0 if added, non 0 for error
    """

    ret = 0

    rulesetname = params["rulesetname"]
    envname = params["envname"]

    enginelist = get_list_of_engines(p_engine, p_username)

    if (params["metaname"] is None) and (inputfile is None) and (fromconnector
                                                                 is None):
        print_error("Option metaname, inputfile or fromconnector is required")
        return 1

    if ((params["metaname"]) and inputfile) or \
       ((params["metaname"]) and fromconnector) or \
       (inputfile and fromconnector):
        print_error(
            "Option metaname, fromconnector and inputfile are mutally exclusive"
        )
        return 1

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        #rulelist.LoadRulesets()
        ruleref = rulelist.get_rulesetId_by_name(rulesetname)

        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
            if (params["metaname"]):
                ret = ret + ruleobj.addmeta(params)
            elif inputfile:
                ret = ret + ruleobj.addmetafromfile(inputfile, bulk)
            elif fromconnector:
                ret = ret + ruleobj.addmetafromfetch(params["fetchfilter"],
                                                     bulk)
            else:
                print_error("Source for add meta is not specified")
        else:
            ret = ret + 1

    return ret
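
A minimal usage sketch for the function above, assuming a configured engine called myengine; all other names are placeholders, and the params keys beyond the ones read directly by the function are not shown.

# Hypothetical call: add a single table "EMPLOYEES" to ruleset "HR_RS".
# Exactly one of metaname / inputfile / fromconnector may be set;
# ruleobj.addmeta() may expect additional keys that are not listed here.
params = {
    "rulesetname": "HR_RS",
    "envname": "DEV_ENV",
    "metaname": "EMPLOYEES",
    "fetchfilter": None,      # only used together with fromconnector
}
rc = ruleset_addmeta("myengine", "admin", params, inputfile=None,
                     fromconnector=None, bulk=False)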
Example no. 2
def profilejob_add(p_engine, params):
    """
    Add profile job to Masking engine
    param1: p_engine: engine name from configuration
    param2: params: job parameters
    return 0 if added, non 0 for error
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    logger = logging.getLogger()

    envname = params['envname']
    jobname = params['jobname']
    rulesetname = params['rulesetname']
    profilename = params['profilename']

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        joblist = DxProfileJobsList()
        envlist = DxEnvironmentList()
        rulesetlist = DxRulesetList()
        profilesetlist = DxProfilesList()
        profileref = profilesetlist.get_profileSetId_by_name(profilename)
        envlist.LoadEnvironments()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        rulesetlist.LoadRulesets(envname)
        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)

        job = DxProfileJob(engine_obj, None)
        job.ruleset_id = rulesetref
        job.job_name = jobname
        job.profile_set_id = profileref

        for p in masking_params_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(job, p, value)

        if joblist.add(job):
            ret = ret + 1

    return ret
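
A sketch of a call to profilejob_add; note that the loop above indexes params[p] for every entry of masking_params_list, so each of those keys must be present (None to skip). All names are placeholders.

params = {
    "envname": "DEV_ENV",
    "jobname": "profile_hr",
    "rulesetname": "HR_RS",
    "profilename": "HIPAA",
    # every key listed in masking_params_list must also be present,
    # using 'Y'/'N' for booleans as handled by the loop above
}
rc = profilejob_add("myengine", params)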
Example no. 3
def ruleset_add(p_engine, p_username, rulesetname, connectorname, envname):
    """
    Add ruleset to Masking engine
    param1: p_engine: engine name from configuration
    param2: p_username: username for the engine
    param3: rulesetname: ruleset name
    param4: connectorname: connector name
    param5: envname: environment name
    return 0 if added, non 0 for error
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        connlist = DxConnectorsList(envname)
        #connlist.LoadConnectors()
        logger.debug("Connector is %s" % connectorname)
        connref = connlist.get_connectorId_by_name(connectorname)
        connobj = connlist.get_by_ref(connref)
        if connobj:
            if connobj.is_database:
                ruleset = DxDatabaseRuleset(engine_obj)
                ruleset.create_database_ruleset(
                    ruleset_name=rulesetname,
                    database_connector_id=connobj.connectorId,
                    refresh_drops_tables=None)
            else:
                ruleset = DxFileRuleset(engine_obj)
                ruleset.create_file_ruleset(
                    ruleset_name=rulesetname,
                    file_connector_id=connobj.connectorId)

            if rulelist.add(ruleset):
                ret = ret + 1
        else:
            ret = ret + 1

    return ret
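
A one-line usage sketch; the engine, connector and environment names are placeholders for objects that already exist on the engine.

# Creates a database or file ruleset depending on the connector type.
rc = ruleset_add("myengine", "admin", "HR_RS", "HR_ORA_CONN", "DEV_ENV")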
Example no. 4
def ruleset_addmeta(p_engine, params, inputfile):
    """
    Add metadata to the Masking engine
    param1: p_engine: engine name from configuration
    param2: params: set of required parameters to add meta
    param3: inputfile: file with table/file definition
    return 0 if added, non 0 for error
    """

    ret = 0

    rulesetname = params["rulesetname"]
    envname = params["envname"]

    enginelist = get_list_of_engines(p_engine)

    if (params["metaname"] is None) and (inputfile is None):
        print_error("Option metaname or inputfile is required")
        return 1

    if (params["metaname"]) and (inputfile):
        print_error("Option metaname and inputfile are mutally exclusive")
        return 1

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        ruleref = rulelist.get_rulesetId_by_name(rulesetname)

        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
            if (params["metaname"]):
                ret = ret + ruleobj.addmeta(params)
            else:
                ret = ret + ruleobj.addmetafromfile(inputfile)
        else:
            ret = ret + 1

    return ret
Example no. 5
def ruleset_worker(**kwargs):
    """
    Run function_to_call for a ruleset on the Masking engine
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: envname: environment name
    param4: function_to_call: function to call for a particular ruleset
    return 0 if the action succeeded, non 0 for error
    """

    p_engine = kwargs.get('p_engine')
    rulesetname = kwargs.get('rulesetname')
    envname = kwargs.get('envname')
    function_to_call = kwargs.get('function_to_call')

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue

        # envlist = DxEnvironmentList()
        # envlist.LoadEnvironments()
        # rulelist = DxRulesetList()
        rulelist = DxRulesetList(envname)
        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            dynfunc = globals()[function_to_call]
            if dynfunc(ruleref=ruleref,
                       rulelist=rulelist,
                       engine_obj=engine_obj,
                       **kwargs):
                ret = ret + 1
        else:
            ret = ret + 1

    return ret
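
Because the worker resolves function_to_call through globals(), the callee must live in the same module and accept ruleref, rulelist and engine_obj plus the caller's own keyword arguments. A hypothetical callee and dispatch (names are placeholders):

def do_ruleset_print(**kwargs):
    # hypothetical action: print the resolved ruleset name
    rulelist = kwargs.get('rulelist')
    ruleref = kwargs.get('ruleref')
    print(rulelist.get_by_ref(ruleref).ruleset_name)
    return 0

rc = ruleset_worker(p_engine="myengine", rulesetname="HR_RS",
                    envname="DEV_ENV", function_to_call="do_ruleset_print")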
Example no. 6
def ruleset_deletemeta(p_engine, rulesetname, metaname, envname):
    """
    Delete meta (file, table) from a ruleset on the Masking engine
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: metaname: metaname to delete
    param4: envname: environment name
    return 0 if deleted, non 0 for error
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        ruleref = rulelist.get_rulesetId_by_name(rulesetname)

        metalist = DxMetaList()
        metalist.LoadMeta(ruleset_id=ruleref)

        metaref = metalist.get_MetadataId_by_name(metaname)

        if metaref:
            if metalist.delete(metaref):
                ret = ret + 1
        else:
            ret = ret + 1

    return ret
Example no. 7
def ruleset_list_worker(**kwargs):
    """
    Print list of rulesets filtered by ruleset name or environment name
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetName: name of ruleset to display
    param4: envname: name of environment to list rulesets from
    param5: function_to_call: function to call for every ruleset found
    return 0 if environment found
    """

    p_engine = kwargs.get('p_engine')
    format = kwargs.get('format')
    rulesetName = kwargs.get('rulesetName')
    envname = kwargs.get('envname')
    function_to_call = kwargs.get('function_to_call')
    data = kwargs.get('data')

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        #envlist = DxEnvironmentList()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)

        if rulesetName is None:
            rulesets = rulelist.get_allref()
            if len(rulesets) == 0:
                ret = ret + 1
                continue
        else:
            rulesets = rulelist.get_all_rulesetId_by_name(rulesetName)
            if rulesets is None:
                ret = ret + 1
                continue

        # connlist = DxConnectorsList(envname)
        # connlist.LoadConnectors(envname)

        for ruleid in rulesets:
            ruleobj = rulelist.get_by_ref(ruleid)
            connobj = DxConnectorsList.get_by_ref(ruleobj.connectorId)

            dynfunc = globals()[function_to_call]
            if dynfunc(ruleobj=ruleobj, connobj=connobj,
                       # envlist=envlist,
                       engine_obj=engine_obj, **kwargs):
                ret = ret + 1
                continue

    return ret
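
The same dispatch pattern applies here, but the callee receives one ruleobj/connobj pair per ruleset. A hypothetical callee printing a single row, with placeholder names:

def do_ruleset_row(**kwargs):
    # hypothetical callee invoked once per ruleset
    ruleobj = kwargs.get('ruleobj')
    connobj = kwargs.get('connobj')
    connectorname = connobj.connector_name if connobj else 'N/A'
    print("%s,%s" % (ruleobj.ruleset_name, connectorname))
    return 0

rc = ruleset_list_worker(p_engine="myengine", format="csv", rulesetName=None,
                         envname="DEV_ENV", function_to_call="do_ruleset_row",
                         data=None)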
Example no. 8
def column_batch(p_engine, rulesetname, envname, inputfile, inventory):
    """
    Update all columns defined in file
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: envname: environment name
    param4: inputfile: file handler with entries
    param5: inventory: True if the input file is a GUI inventory export
    return 0 if all rows processed without issues
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    if inventory is True:
        mapping = algname_mapping_import()
    else:
        mapping = None

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        metalist = DxMetaList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)

        metacolumn_list = {}

        for line in inputfile:
            if line.startswith('#'):
                continue
            try:
                logger.debug("readling line %s" % line)
                if inventory is False:
                    if ruleobj.type == "Database":
                        (metaname, column_role, parent_column, column_name,
                         type, domain_name, algname,
                         is_masked_YN, idmethod, rowtype, dateformat) \
                         = line.strip().split(',')
                    else:
                        (metaname, column_name, domain_name, algname,
                         is_masked_YN, priority, recordtype, position, length,
                         dateformat) = line.strip().split(',')
                else:
                    if ruleobj.type == "Database":
                        (env, ruleset, metaname, column_role, parent_column,
                         column_name, type, domain_name, algname,
                         is_masked_YN, idmethod, rowtype, dateformat) \
                         = line.strip().split(',')
                    else:
                        (env, ruleset, metaname, column_name, domain_name,
                         algname, is_masked_YN, priority, recordtype, position,
                         length, dateformat) = line.strip().split(',')
            except ValueError as e:
                if str(e) == "too many values to unpack":
                    logger.error("to few values in inputfile - maybe add "
                                 "--inventory if you are loading an inventory"
                                 "file from GUI")
                    print_error("to few values in inputfile - maybe add "
                                "--inventory if you are loading an inventory"
                                "file from GUI")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break
                else:
                    logger.error("not all columns in file have value")
                    print_error("not all columns in file have value")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break

            metaref = metalist.get_MetadataId_by_name(metaname)
            if metaref is None:
                ret = ret + 1
                continue

            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s " % metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                column_name)

            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                if is_masked_YN == 'Y' or is_masked_YN == 'true':
                    is_masked = True
                else:
                    is_masked = False

                if algname == '' or algname == '-':
                    algname = 'None'

                if domain_name == '' or domain_name == '-':
                    domain_name = 'None'

                if ruleobj.type == "Database":
                    if idmethod == 'Auto':
                        colobj.is_profiler_writable = True
                    elif idmethod == 'User':
                        colobj.is_profiler_writable = False
                    else:
                        print_error("Wrong id method")
                        return 1

                if dateformat == '-':
                    colobj.date_format = None
                else:
                    colobj.date_format = dateformat

                if mapping is not None and algname != 'None':
                    try:
                        algname = mapping[algname]
                    except KeyError as e:
                        logger.debug("Wrong algoritm name in input file"
                                     ". Not an inventory file ?")
                        logger.debug(str(e))
                        print_error("Wrong algoritm name in input file"
                                    ". Not an inventory file ?")
                        return 1

                ret = ret + update_algorithm(colobj=colobj,
                                             algname=algname,
                                             domainname=domain_name,
                                             metaobj=metaobj,
                                             ruleobj=ruleobj,
                                             is_masked=is_masked)
            else:
                ret = ret + 1
                continue

    return ret
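
The unpacking above fixes the expected CSV layout; for a database ruleset loaded without the inventory flag, each line needs exactly these eleven comma-separated fields. A hedged sketch of driving the function from a local file (file name and all other names are placeholders):

# expected field order for a Database ruleset, inventory=False:
# metaname,column_role,parent_column,column_name,type,domain_name,
# algname,is_masked_YN,idmethod,rowtype,dateformat
with open("hr_columns.csv") as inputfile:
    rc = column_batch("myengine", "HR_RS", "DEV_ENV", inputfile,
                      inventory=False)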
Example no. 9
def tab_list_details(p_engine, p_format, rulesetname, envname, metaname, what):
    """
    List details of tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    param6: what - Database/File
    return 0 on success, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()

    if what == 'Database':
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("Table name", 32),
                       ("Logical key", 32), ("Where clause", 50),
                       ("Custom SQL", 50)]
    else:
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("File name", 32),
                       ("File type", 32), ("File format name", 32),
                       ("Delimiter", 10), ("End of record", 10)]

    data.create_header(data_header)

    data.format_type = p_format

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_all = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_all is None:
                ret = ret + 1
                continue
            rulesetref_list = [
                x for x in rulesetref_all
                if rulelist.get_by_ref(x).type == what
            ]
            if len(rulesetref_list) == 0:
                ret = ret + 1
                continue
        else:
            if what == 'Database':
                rulesetref_list = rulelist.get_all_database_rulesetIds()
                if rulesetref_list is None:
                    continue
            else:
                rulesetref_list = rulelist.get_all_file_rulesetIds()
                if rulesetref_list is None:
                    continue

        filetypelist = DxFileFormatList()
        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                if what == 'Database':
                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.key_column,
                                     repr(metaobj.where_clause),
                                     repr(metaobj.custom_sql))
                else:
                    if metaobj.file_format_id is not None:
                        fileformatobj = filetypelist.get_by_ref(
                            metaobj.file_format_id)
                        fileformatname = fileformatobj.file_format_name
                    else:
                        fileformatname = 'N/A'

                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.file_type, fileformatname,
                                     metaobj.delimiter,
                                     repr(metaobj.end_of_record))

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table %s not found" % metaname)
        return ret
Example no. 10
def ruleset_listmeta(p_engine, format, rulesetname, envname, metaname):
    """
    List tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    return 0 if found, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Environent name", 30),
                    ("Ruleset name", 30),
                    ("Metadata type", 15),
                    ("Metadata name", 32)
                  ]
    data.create_header(data_header)
    data.format_type = format
    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_list = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            rulesetref_list = rulelist.get_allref()
            if rulesetref_list is None:
                continue

        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                data.data_insert(
                                  engine_tuple[0],
                                  environment_name,
                                  ruleobj.ruleset_name,
                                  ruleobj.type,
                                  metaobj.meta_name
                                )

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table or file %s not found" % metaname)
        return ret
Example no. 11
def job_add(p_engine, params):
    """
    Add masking job to Masking engine
    param1: p_engine: engine name from configuration
    param2: params: job parameters
    return 0 if added, non 0 for error
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    logger = logging.getLogger()

    envname = params['envname']
    jobname = params['jobname']
    rulesetname = params['rulesetname']

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        joblist = DxJobsList()
        envlist = DxEnvironmentList()
        rulesetlist = DxRulesetList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        rulesetlist.LoadRulesets(envname)

        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)

        job = DxJob(engine_obj, None)
        job.ruleset_id = rulesetref
        job.job_name = jobname

        for p in optional_params_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(job, p, value)

        dmo = DatabaseMaskingOptions()

        for p in optional_options_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(dmo, p, value)

        if params["prescript"]:
            prescript = MaskingJobScript()
            prescript.contents = ''.join(params["prescript"].readlines())
            prescript.name = params["prescript"].name
            dmo.prescript = prescript

        if params["postscript"]:
            postscript = MaskingJobScript()
            postscript.contents = ''.join(params["postscript"].readlines())
            postscript.name = params["postscript"].name
            dmo.postscript = postscript

        job.database_masking_options = dmo

        if joblist.add(job):
            ret = ret + 1

    return ret
Example no. 12
def tab_selector(p_engine, rulesetname, envname, metaname, function_to_call,
                 params):
    """
    Update properties of a table/file in a ruleset
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name to update metadata in
    param3: envname: environment name
    param4: metaname: name of table/file to update
    param5: function_to_call: function to call for the selected object
    param6: params: parameters to set on the table/file
    return 0 if updated, non zero for error
    """

    ret = 0
    update = False

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        else:
            ruleref = None

        metalist = DxMetaList()
        metalist.LoadMeta(ruleset_id=ruleref)

        metaref = metalist.get_MetadataId_by_name(metaname)

        if metaref:
            metaobj = metalist.get_by_ref(metaref)
        else:
            ret = ret + 1
            continue

        param_map = {
            "custom_sql": "custom_sql",
            "where_clause": "where_clause",
            "having_clause": "having_clause",
            "key_column": "key_column",
            "file_format": "file_format_id",
            "file_delimiter": "delimiter",
            "file_eor": "end_of_record",
            "file_enclosure": "enclosure",
            "file_name_regex": "name_is_regular_expression"
        }

        eor = params["file_eor"]
        if eor == 'custom':
            if params["file_eor_custom"]:
                params["file_eor"] = params["file_eor_custom"]
            else:
                print_error("Custom End of record is unknown")
                return 1

        for p in param_map.keys():
            if params[p]:
                if hasattr(metaobj, param_map[p]):
                    update = True
                    value = params[p]
                    if value == '':
                        value = None
                    setattr(metaobj, param_map[p], value)

        if update:
            ret = ret + metaobj.update()

    return ret
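
A sketch of the params dictionary tab_selector expects: every key of param_map plus file_eor_custom has to be present, and only truthy values are written to the table/file object. Names and the WHERE clause are placeholders.

params = {
    "custom_sql": None,
    "where_clause": "dept_id = 10",   # illustrative value
    "having_clause": None,
    "key_column": None,
    "file_format": None,
    "file_delimiter": None,
    "file_eor": None,
    "file_eor_custom": None,
    "file_name_regex": None,
}
rc = tab_selector("myengine", "HR_RS", "DEV_ENV", "EMPLOYEES", None, params)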
Example no. 13
def column_save(p_engine, sortby, rulesetname, envname, metaname, columnname,
                algname, is_masked, file, inventory):
    """
    Save column list to a file
    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: algname: algorithm name to filter
    param8: is_masked: is masked filter
    param9: file: file to write output to
    param10: inventory: True to save in GUI inventory format
    return 0 if no issues
    """

    if p_engine == 'all':
        print_error("you can't run column save command on all engines"
                    "at same time")
        return 1

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    engine_tuple = enginelist[-1]

    engine_obj = DxMaskingEngine(engine_tuple)

    if engine_obj.get_session():
        return 1

    rulelist = DxRulesetList(envname)
    ruleref = rulelist.get_rulesetId_by_name(rulesetname)

    ruleobj = rulelist.get_by_ref(ruleref)

    if ruleobj is None:
        return 1

    if ruleobj.type == "Database":
        data = DataFormatter()
        data_header = [("Table Name", 32), ("Type", 5),
                       ("Parent Column Name", 5), ("Column Name", 32),
                       ("Data Type", 32), ("Domain", 32), ("Algorithm", 32),
                       ("Is Masked", 32), ("ID Method", 32), ("Row Type", 32),
                       ("Date Format", 32)]
        worker = "do_save_database"
    else:
        data = DataFormatter()
        data_header = [("File Name", 32), ("Field Name", 5), ("Domain", 32),
                       ("Algorithm", 32), ("Is Masked", 32), ("Priority", 8),
                       ("Record Type", 15), ("Position", 8), ("Length", 8),
                       ("Date Format", 32)]
        worker = "do_save_file"

    if inventory is True:
        data_header = [("Environment Name", 32),
                       ("Rule Set", 32)] + data_header

    data.create_header(data_header, inventory)
    data.format_type = "csv"

    ret = column_worker(p_engine,
                        sortby,
                        rulesetname,
                        envname,
                        metaname,
                        columnname,
                        algname,
                        is_masked,
                        None,
                        None,
                        None,
                        worker,
                        data=data,
                        inventory=inventory)

    if ret == 0:
        output = data.data_output(False, sortby)
        try:
            file.write(output)
            file.close()
            print_message("Columns saved to file %s" % file.name)
            return 0
        except Exception as e:
            print_error("Problem with file %s Error: %s" % (file.name, str(e)))
            return 1

    else:
        return ret
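
A usage sketch for column_save with placeholder names; the file argument is any writable file object exposing name, write and close.

with open("hr_columns.csv", "w") as out:
    rc = column_save("myengine", None, "HR_RS", "DEV_ENV",
                     None, None, None, None, out, inventory=False)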
Example no. 14
def column_batch(p_engine, p_username, rulesetname, envname, inputfile,
                 inventory):
    """
    Update all columns defined in file
    param1: p_engine: engine name from configuration
    param2: p_username: username for the engine
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: inputfile: file handler with entries
    param6: inventory: True if the input file is a GUI inventory export
    return 0 if all rows processed without issues
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    if inventory is True:
        mapping = algname_mapping_import()
    else:
        mapping = None


    database_list = "metaname column_role parent_column column_name \
                     type domain_name algname is_masked_YN idmethod rowtype dateformat"

    file_list = "metaname column_name domain_name algname \
                 is_masked_YN priority recordtype position \
                 length dateformat"

    inventory_addition = "env ruleset"

    multicolumn_addition = "fieldid groupid"

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        metalist = DxMetaList()

        alg_list = DxAlgorithmList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)

        metacolumn_list = {}


        setversion = False


        for line in inputfile:
            if not setversion:
                setversion = True
                
                if ruleobj.type == "Database":  
                    collist = database_list
                else:
                    collist = file_list

                if inventory is True:
                    collist = inventory_addition + " " + collist

                if "Multi-Column" in line:
                    # we have a 6.0.8 or higher inventory 
                    if inventory is True:
                        collist = collist + " notes " + multicolumn_addition
                    else:
                        collist = collist + " " + multicolumn_addition

                linetype = namedtuple("linetype", collist)

            if line.startswith('#'):
                continue
            try:
                logger.debug("readling line %s" % line)
                lineobj = linetype(*line.strip().split(','))

            except ValueError as e:
                if str(e) == "too many values to unpack":
                    logger.error("to few values in inputfile - maybe add "
                                 "--inventory if you are loading an inventory"
                                 "file from GUI")
                    print_error("to few values in inputfile - maybe add "
                                "--inventory if you are loading an inventory"
                                "file from GUI")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break
                else:
                    logger.error("not all columns in file have value")
                    print_error("not all columns in file have value")
                    logger.error("line %s" % line)
                    print_error("line %s" % line)
                    ret = ret + 1
                    break

            metaref = metalist.get_MetadataId_by_name(lineobj.metaname)
            if metaref is None:
                ret = ret + 1
                continue

            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s " % lineobj.metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                lineobj.column_name)

            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                if lineobj.is_masked_YN == 'Y' or lineobj.is_masked_YN == 'true':
                    is_masked = True
                else:
                    is_masked = False

                if lineobj.algname == '' or lineobj.algname == '-':
                    algname = 'None'
                else:
                    algname = lineobj.algname

                if lineobj.domain_name == '' or lineobj.domain_name == '-':
                    domain_name = 'None'
                else:
                    domain_name = lineobj.domain_name 


                if hasattr(lineobj, 'fieldid'):
                    if lineobj.fieldid == '' or lineobj.fieldid == '-':
                        fieldid = None
                    else:
                        fieldid = lineobj.fieldid 
                else:
                    fieldid = None

                if ruleobj.type == "Database":
                    if lineobj.idmethod == 'Auto':
                        colobj.is_profiler_writable = True
                    elif lineobj.idmethod == 'User':
                        colobj.is_profiler_writable = False
                    else:
                        print_error("Wrong id method")
                        return 1

                if lineobj.dateformat == '-':
                    colobj.date_format = None
                else:
                    colobj.date_format = lineobj.dateformat

                if fieldid is not None:
                    algobj = alg_list.get_by_ref(lineobj.algname)
                    field_id = [ x.field_id for x in algobj.fields if x.name == fieldid ][0]
                    group_id = lineobj.groupid
                else:
                    field_id = None
                    group_id = None


                if mapping is not None and algname != 'None' and algname in mapping:
                    logger.debug("changing a name of algorithm for inventory import: from {} to {}".format(algname, mapping[algname]))
                    algname = mapping[algname]

                ret = ret + update_algorithm(colobj=colobj,
                                             algname=algname,
                                             domainname=domain_name,
                                             metaobj=metaobj,
                                             ruleobj=ruleobj,
                                             is_masked=is_masked,
                                             algorithm_field_id=field_id,
                                             algorithm_group_no=group_id)
            else:
                ret = ret + 1
                continue

    return ret
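
The newer variant parses each CSV line into a namedtuple built from a space-separated field list, which makes the later attribute access (lineobj.column_name, lineobj.algname, ...) self-documenting. A standalone illustration of that parsing step with the database layout; the sample values are invented:

from collections import namedtuple

database_list = ("metaname column_role parent_column column_name type "
                 "domain_name algname is_masked_YN idmethod rowtype "
                 "dateformat")
linetype = namedtuple("linetype", database_list)

line = "EMPLOYEES,,,LAST_NAME,VARCHAR2,LAST_NAME,LastNameLookup,Y,User,-,-"
lineobj = linetype(*line.strip().split(','))
print(lineobj.column_name, lineobj.algname, lineobj.is_masked_YN)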
Example no. 15
def jobs_list_worker(p_engine, p_username, jobname, envname, p_format,
                     joblist_class):
    """
    Print list of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: username for the engine
    param3: jobname: job name to list
    param4: envname: environment name to list jobs from
    param5: p_format: output format
    param6: joblist_class - DxJobsList, DxProfileJobsList
    return 0 if environment found
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Job name", 30),
                    ("Ruleset name", 30),
                    ("Connector name", 30),
                    ("Environment name", 30),
                    ("Completed", 20),
                    ("Status", 20),
                    ("Runtime", 20)
                  ]
    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        joblist = globals()[joblist_class]()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        #rulesetlist.LoadRulesets(envname)
        #connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)

            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API
            # does not show all types of connectors
            if rulesetobj is not None:
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                         envobjname = "N/A"   
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"

            if jobobj.lastExec is not None:
                status = jobobj.lastExec.status
                if (jobobj.lastExec.start_time is not None) and \
                   (jobobj.lastExec.end_time is not None):
                    endtime = jobobj.lastExec.end_time \
                        .strftime("%Y-%m-%d %H:%M:%S")
                    runtimetemp = jobobj.lastExec.end_time \
                        - jobobj.lastExec.start_time
                    runtime = str(runtimetemp)
                else:
                    endtime = 'N/A'
                    runtime = 'N/A'
            else:
                status = 'N/A'
                endtime = 'N/A'
                runtime = 'N/A'

            data.data_insert(
                              engine_tuple[0],
                              jobobj.job_name,
                              rulename,
                              connectorname,
                              envobjname,
                              endtime,
                              status,
                              runtime
                            )
        print("")
        print (data.data_output(False))
        print("")
        return ret
Example no. 16
def job_add(p_engine, p_username, params):
    """
    Add masking job to Masking engine
    param1: p_engine: engine name from configuration
    param2: p_username: username for the engine
    param3: params: job parameters
    return 0 if added, non 0 for error
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine, p_username)

    logger = logging.getLogger()

    envname = params['envname']
    jobname = params['jobname']
    rulesetname = params['rulesetname']

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        joblist = DxJobsList()
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        rulesetlist = DxRulesetList(envname)


        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)

        job = DxJob(engine_obj, None)
        job.create_job(job_name=jobname, ruleset_id=rulesetref)

        for p in optional_params_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(job, p, value)


        dmo = DxDatabaseMaskingOptions()

        for p in optional_options_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(dmo, p, value)

        if params["on_the_fly_masking"] == 'Y' :
            src_env = params["on_the_fly_src_envname"]
            src_con = params["on_the_fly_src_connector"]
            conlist = DxConnectorsList(src_env)
            conid = conlist.get_connectorId_by_name(src_con)
            if not conid :
                return 1
            on_the_fly_maskking_srcobj = DxOnTheFlyJob()
            on_the_fly_maskking_srcobj.connector_id = conid[1:]

            conObj = conlist.get_by_ref(conid)
            if conObj.is_database :
                on_the_fly_maskking_srcobj.connector_type = "DATABASE"
            else:
                on_the_fly_maskking_srcobj.connector_type = "FILE"
            job.on_the_fly_masking_source = on_the_fly_maskking_srcobj


        if params["prescript"]:
            scriptname = os.path.basename(params["prescript"].name)
            prescript = DxMaskingScriptJob(name=scriptname, contents=''.join(params["prescript"].readlines()))
            dmo.prescript = prescript

        if params["postscript"]:
            scriptname = os.path.basename(params["postscript"].name)
            postscript = DxMaskingScriptJob(name=scriptname, contents = ''.join(params["postscript"].readlines()))
            dmo.postscript = postscript

        job.database_masking_options = dmo

        if joblist.add(job):
            ret = ret + 1

    return ret
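
A sketch of calling the newer job_add. Every key in optional_params_list and optional_options_list must exist in params (None to skip); the extra keys shown are the ones the function reads directly. All names are placeholders.

params = {
    "envname": "DEV_ENV",
    "jobname": "mask_hr",
    "rulesetname": "HR_RS",
    "on_the_fly_masking": None,          # set to 'Y' together with the two keys below
    "on_the_fly_src_envname": None,
    "on_the_fly_src_connector": None,
    "prescript": None,                   # or an open file handle
    "postscript": None,
    # plus every key from optional_params_list / optional_options_list,
    # using 'Y'/'N' for booleans as in the loops above
}
rc = job_add("myengine", "admin", params)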
Example no. 17
def column_worker(p_engine,
                  sortby,
                  rulesetname,
                  envname,
                  metaname,
                  columnname,
                  filter_algname,
                  filter_is_masked,
                  algname,
                  is_masked,
                  domainname,
                  function_to_call,
                  data=None,
                  inventory=None,
                  **kwargs):
    """
    Select a column using all filter parameters
    and run action defined in function_to_call

    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: filter_algname: algorithm name to filter
    param8: filter_is_masked: is masked filter
    param9: algname: new algorithm to set
    param10: is_masked: set masking False/True
    param11: domainname: new domain to set
    param12: function_to_call: function name to call
    param13: data: output object
    param14: inventory: True if processing a GUI inventory format
    return 0 if the action is processed without issues
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        connlist = DxConnectorsList(envname)
        metalist = DxMetaList()

        rulesetref_list = []

        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
            if ruleref:
                rulesetref_list.append(ruleref)
        else:
            rulesetref_list = rulelist.get_allref()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)

            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
            else:
                envobj = None

            metalist.LoadMeta(ruleobj.ruleset_id)

            metasetref_list = []

            if metaname:
                metaref = metalist.get_MetadataId_by_name(metaname, 1)
                if metaref:
                    metasetref_list.append(metaref)
            else:
                metasetref_list = metalist.get_allref()

            for metaid in metasetref_list:
                metaobj = metalist.get_by_ref(metaid)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaid,
                                    is_masked=filter_is_masked)

                colsetref_list = []

                colcount = kwargs.get("colcount")
                if colcount is not None:
                    colcount.extend(collist.get_allref())

                if columnname:
                    colref = collist.get_column_id_by_name(columnname)
                    logger.debug("Column ref with name %s : %s" %
                                 (columnname, colref))
                    if colref:
                        colsetref_list.append(colref)
                else:
                    colsetref_list = collist.get_allref()

                logger.debug("List of columns to process : %s" %
                             colsetref_list)

                if filter_algname:
                    colsetref_masked = collist.get_column_id_by_algorithm(
                        filter_algname)
                    logger.debug("List of columns with algorithm %s : %s" %
                                 (filter_algname, colsetref_masked))
                    colsetref_list = list(
                        set(colsetref_list)
                        & set(colsetref_masked))
                    logger.debug("Intersection with column name filter %s" %
                                 colsetref_masked)

                for colref in colsetref_list:
                    colobj = collist.get_by_ref(colref)

                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(data=data,
                                        engine=engine_tuple,
                                        envobj=envobj,
                                        ruleobj=ruleobj,
                                        metaobj=metaobj,
                                        colobj=colobj,
                                        algname=algname,
                                        is_masked=is_masked,
                                        domainname=domainname,
                                        inventory=inventory,
                                        **kwargs)
    return ret
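
column_worker resolves function_to_call through globals() and calls it once per matching column with the keyword arguments shown at the bottom of the loop. A hypothetical callee and invocation; all names are placeholders, and the callee must live in the same module because of the globals() lookup.

def do_print_column(**kwargs):
    # hypothetical callee passed to column_worker by name
    ruleobj = kwargs.get('ruleobj')
    metaobj = kwargs.get('metaobj')
    colobj = kwargs.get('colobj')       # the column object that was selected
    print("matched a column of %s in ruleset %s" %
          (metaobj.meta_name, ruleobj.ruleset_name))
    return 0

rc = column_worker("myengine", None, "HR_RS", "DEV_ENV", None, None,
                   None, None, None, None, None, "do_print_column")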
Example no. 18
def do_update(**kwargs):
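    """
    Update a masking or profile job with values from params
    kwargs: jobref: reference of the job to update
    kwargs: joblist: job list object the job belongs to
    kwargs: params: new job parameters ('Y'/'N' values mapped to booleans)
    return value of jobobj.update(), or 1 if there was nothing to update
    """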
    jobref = kwargs.get('jobref')
    joblist = kwargs.get('joblist')
    params = kwargs.get('params')

    jobobj = joblist.get_by_ref(jobref)
    update = False

    logger = logging.getLogger()

    if "rulesetname" in params and params['rulesetname'] != None:
        rulesetname = params['rulesetname']
        # as job is in particular environment
        # new ruleset need to be search in same environment
        # job metadata doesn't return environment id so it has to be
        # found by linking old ruleset via connector id to environment
        rulesetlist = DxRulesetList()
        #rulesetlist.LoadRulesets(None)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(None)
        oldrulesetref = jobobj.ruleset_id
        logger.debug("old ruleset %s" % oldrulesetref)
        oldruleobj = rulesetlist.get_by_ref(oldrulesetref)
        oldconnobj = connlist.get_by_ref(oldruleobj.connectorId)
        rulesetlist.LoadRulesetsbyId(oldconnobj.environment_id)
        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)
        logger.debug("new ruleset %s" % rulesetref)
        if rulesetref != oldrulesetref:
            update = True
            jobobj.ruleset_id = rulesetref

    if type(jobobj) == dxm.lib.DxJobs.DxJob.DxJob:

        for p in optional_params_list:
            if params[p] is not None:
                update = True
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(jobobj.obj, p, value)

        dmo = jobobj.database_masking_options

        for p in optional_options_list:
            if params[p] is not None:
                update = True
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(dmo, p, value)

        if params["prescript"]=='':
            dmo.prescript = None
        elif params["prescript"]:
            scriptname = os.path.basename(params["prescript"].name)
            prescript = DxMaskingScriptJob(name=scriptname, contents=''.join(params["prescript"].readlines()))
            dmo.prescript = prescript

        if params["postscript"]=='':
            dmo.postscript = None
        if params["postscript"]:
            scriptname = os.path.basename(params["postscript"].name)
            postscript = DxMaskingScriptJob(name=scriptname, contents = ''.join(params["postscript"].readlines()))
            dmo.postscript = postscript
    else:

        if "profilename" in params and params['rulesetname'] != None:
            profilename = params['profilename']

            oldprofile = jobobj.profile_set_id
            logger.debug("old profile %s" % oldprofile)
            profilelist = DxProfilesList()
            profileref = profilelist.get_profileSetId_by_name(profilename)
            logger.debug("new profile %s" % profileref)
            if profileref != oldprofile:
                update = True
                jobobj.profile_set_id = profileref

        for p in masking_params_list:
            if params[p] is not None:
                update = True
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(jobobj.obj, p, value)

    if update:
        return jobobj.update()
    else:
        print_message('Nothing to update')
        return 1
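
The 'Y'/'N'-to-boolean translation repeats in several of the functions above; a small helper like the hypothetical one below would express the same convention once.

def yn_to_value(raw):
    # CLI convention used throughout these examples:
    # 'Y' -> True, 'N' -> False, anything else passes through unchanged
    if raw == 'Y':
        return True
    if raw == 'N':
        return False
    return raw

# e.g. the loops in do_update could then read:
# for p in optional_params_list:
#     if params[p] is not None:
#         update = True
#         setattr(jobobj.obj, p, yn_to_value(params[p]))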
Example no. 19
def sync_worker(p_engine, objecttype, objectname, envname,
                function_to_call, **kwargs):
    """
    Run an action for list of syncable objects
    param1: p_engine: engine name from configuration
    param2: objecttype: object type to process, all types if None
    param3: objectname: object name to process, all objects if None
    param4: envname: environment name to limit the scope
    param5: function_to_call: function to call for every matched object
    return 0 if objecttype found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    # objectname = "RandomValueLookup"
    # objectname = None
    ret = 0

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        synclist = DxSyncList(objecttype)

        if (objecttype is None or objecttype == "algorithm") \
           and envname is None:
            if objectname:
                alglist = [objectname]
            else:
                alglist = synclist.get_all_algorithms()

            for syncref in alglist:
                syncobj = synclist.get_object_by_type_name(
                                        "algorithm", syncref)
                if syncobj:

                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname='global',
                        name=syncref, **kwargs)

        if objecttype is None or objecttype == "database_connector" \
           or objecttype == "file_connector":

            envlist = DxEnvironmentList()
            connlist = DxConnectorsList(envname)

            if objecttype is None:
                objtypelist = ["database_connector", "file_connector"]
            else:
                objtypelist = [objecttype]

            for objtype in objtypelist:

                if objectname:
                    connbynameref = connlist.get_connectorId_by_name(
                                        objectname, False)
                    if connbynameref:
                        syncconnref = int(connbynameref[1:])
                        if synclist.get_object_by_type_name(
                                                objtype,
                                                syncconnref):
                            connrefs = [syncconnref]
                        else:
                            connrefs = []
                    else:
                        connrefs = []
                else:
                    connrefs = synclist.get_all_object_by_type(objtype)

                for syncref in connrefs:
                    syncobj = synclist.get_object_by_type_name(
                                        objtype, syncref)
                    if syncobj.object_type == 'DATABASE_CONNECTOR':
                        connobj = connlist.get_by_ref("d" + str(syncref))
                    else:
                        connobj = connlist.get_by_ref("f" + str(syncref))

                    if connobj is None:
                        # limited by env
                        continue
                    envobj = envlist.get_by_ref(connobj.environment_id)

                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname=envobj.environment_name,
                        name=connobj.connector_name,
                        **kwargs)

        if objecttype is None or objecttype == "database_ruleset" \
           or objecttype == "file_ruleset":

            envlist = DxEnvironmentList()
            connlist = DxConnectorsList(envname)
            rulesetList = DxRulesetList(envname)

            if objecttype is None:
                objtypelist = ["database_ruleset", "file_ruleset"]
            else:
                objtypelist = [objecttype]

            for objtype in objtypelist:

                if objectname:
                    rulesetrefs = []
                    rulesetref = rulesetList.get_all_rulesetId_by_name(
                                                objectname)
                    if rulesetref:
                        for rsref in rulesetref:
                            if synclist.get_object_by_type_name(
                                                objtype, rsref):
                                rulesetrefs.append(rsref)
                            else:
                                rulesetrefs = []
                    else:
                        rulesetrefs = []
                else:
                    rulesetrefs = synclist.get_all_object_by_type(objtype)

                for syncref in rulesetrefs:
                    syncobj = synclist.get_object_by_type_name(objtype,
                                                               syncref)
                    rulesetobj = rulesetList.get_by_ref(syncref)
                    if rulesetobj is None:
                        # limited by env
                        continue
                    connobj = connlist.get_by_ref(rulesetobj.connectorId)
                    envobj = envlist.get_by_ref(connobj.environment_id)
                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname=envobj.environment_name,
                        name=rulesetobj.ruleset_name,
                        **kwargs)

        if (objecttype is None or objecttype == "global_object"
           or objecttype == "key" or objecttype == "domain") \
           and envname is None:

            if objecttype is None:
                objtypelist = ["global_object", "key", "domain"]
            else:
                objtypelist = [objecttype]

            for objtype in objtypelist:
                if objectname:
                    objlist = [objectname]
                else:
                    objlist = synclist.get_all_object_by_type(objtype)
                for syncref in objlist:
                    syncobj = synclist.get_object_by_type_name(objtype,
                                                               syncref)
                    if syncobj:

                        dynfunc = globals()[function_to_call]
                        ret = ret + dynfunc(
                            object=syncobj,
                            engine_obj=engine_obj,
                            envname='global',
                            name=syncref, **kwargs)

        if objecttype is None or objecttype == "masking_job":

            envlist = DxEnvironmentList()
            joblist = DxJobsList()
            joblist.LoadJobs(envname)
            connlist = DxConnectorsList(envname)
            rulesetlist = DxRulesetList(envname)

            if objectname:
                jobref = joblist.get_jobId_by_name(objectname)
                if synclist.get_object_by_type_name("masking_job", jobref):
                    jobrefs = [jobref]
                else:
                    jobrefs = []
            else:
                jobrefs = synclist.get_all_object_by_type("masking_job")

            for syncref in jobrefs:
                syncobj = synclist.get_object_by_type_name("masking_job",
                                                           syncref)
                jobobj = joblist.get_by_ref(syncref)
                if envname and jobobj is None:
                    # limited by env
                    continue
                rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
                connectorobj = connlist.get_by_ref(rulesetobj.connectorId)
                envobj = envlist.get_by_ref(connectorobj.environment_id)
                dynfunc = globals()[function_to_call]
                ret = ret + dynfunc(
                    object=syncobj,
                    engine_obj=engine_obj,
                    envname=envobj.environment_name,
                    name=jobobj.job_name,
                    **kwargs)

    return ret
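
# sync_worker resolves the action to run by name with globals()[function_to_call],
# so any module-level function that accepts the keyword arguments it passes
# (object, engine_obj, envname, name, plus **kwargs) can be plugged in.
# A minimal sketch of such a worker; `print_sync_object` is a hypothetical
# example, not an existing dxm-toolkit function.  The first parameter has to be
# named `object` (shadowing the builtin) because sync_worker passes it by keyword.
def print_sync_object(object, engine_obj, envname, name, **kwargs):
    # return 0 on success so sync_worker can add it to its error counter
    print("environment %s: syncable object %s" % (envname, name))
    return 0

# Possible call, assuming an engine named "myengine" exists in the configuration:
# sync_worker("myengine", "algorithm", None, None, "print_sync_object")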
Exemplo n.º 20
0
def jobs_report_worker(p_engine, p_username, jobname, envname, p_format, last, startdate, enddate, details, jobtype='masking'):
    """
    Print report of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: user name from configuration
    param3: jobname: job name to list
    param4: envname: environment name to list jobs from
    param5: p_format: output format
    param6: last: display the last execution only
    param7: startdate: filter executions by start date
    param8: enddate: filter executions by end date
    param9: details: display per-object execution details
    param10: jobtype: 'masking' to report DxJobsList jobs, otherwise DxProfileJobsList jobs
    return 0 if the report was generated without errors
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()

    if jobtype == 'masking':
        if details:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("ExecId", 6),
                ("Meta name", 12),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
        else:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("Job Id", 6),
                ("Min Memory", 10),
                ("Max Memory", 10),
                ("Streams", 7),
                ("On The Fly", 10),
                ("Ruleset Type", 12),
                ("ExecId", 6),
                ("Total Rows", 10),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
    else:
        data_header = [
            ("Engine name", 30),
            ("Environment name", 30),
            ("Job name", 30),
            ("Ruleset Type", 12),
            ("ExecId", 6),
            ("Started", 20),
            ("Completed", 20),
            ("Status", 20),
            ("Runtime", 20)
        ]


    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        if jobtype == 'masking':
            joblist = DxJobsList()
        else:
            joblist = DxProfileJobsList()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        #rulesetlist.LoadRulesets(envname)
        #connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)

            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API does not show all types of connectors
            if rulesetobj is not None:
                ruleset_type = rulesetobj.type
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"
                ruleset_type = "N/A"


            lastonly = bool(last)

            if lastonly:
                execlist = [jobobj.lastExec]
            else:
                if startdate or enddate:
                    execlist = jobobj.filter_executions(startdate, enddate)
                else:
                    execlist = jobobj.execList

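            # execlist now holds the executions selected for this job:
            # the last one only, a date filtered range, or all of them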
            if execlist:

                for jobexec in execlist:

                    if not details:

                        if jobtype == 'masking':

                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                rowsmasked = jobexec.rows_masked
                                rowstotal = jobexec.rows_total
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'
                                rowsmasked = 'N/A'
                                rowstotal = 'N/A'

                            data.data_insert(
                                            engine_tuple[0],
                                            envobjname,
                                            jobobj.job_name,
                                            jobobj.masking_job_id,
                                            jobobj.min_memory,
                                            jobobj.max_memory,
                                            jobobj.num_input_streams,
                                            jobobj.on_the_fly_masking,
                                            ruleset_type,
                                            execid,
                                            rowstotal,
                                            rowsmasked,
                                            starttime,
                                            endtime,
                                            status,
                                            runtime
                                            )
                        else:

                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'


                            data.data_insert(
                                            engine_tuple[0],
                                            envobjname,
                                            jobobj.job_name,
                                            ruleset_type,
                                            execid,
                                            starttime,
                                            endtime,
                                            status,
                                            runtime
                                            )

                    else:
                        # per-component execution details
                        if jobexec is not None:
                            execid = jobexec.execution_id
                            complist = jobobj.list_execution_component(execid)

                            if complist is not None:
                                for comp in complist:    
                                    status = comp.status
                                    rowsmasked = comp.rows_masked
                                    metaname = comp.component_name
                                    if comp.start_time is not None:
                                        starttime = comp.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                    else:
                                        starttime = 'N/A'
                                    if (comp.end_time is not None) and \
                                    (comp.start_time is not None):
                                        endtime = comp.end_time \
                                            .strftime("%Y-%m-%d %H:%M:%S")
                                        runtimetemp = comp.end_time \
                                            - comp.start_time
                                        runtime = str(runtimetemp)
                                    else:
                                        endtime = 'N/A'
                                        runtime = 'N/A'

                                    data.data_insert(
                                                    engine_tuple[0],
                                                    envobjname,
                                                    jobobj.job_name,
                                                    execid,
                                                    metaname,
                                                    rowsmasked,
                                                    starttime,
                                                    endtime,
                                                    status,
                                                    runtime
                                                    )
                        else:
                            # no execution details available
                            ret = 1


                    

            else:
                # no executions
                ret = 1


        print("")
        print (data.data_output(False))
        print("")
        return ret
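
# A minimal sketch of the start/end/runtime formatting that the report above
# repeats for executions and execution components.  `format_exec_times` is a
# hypothetical helper name; it assumes an object exposing start_time and
# end_time as datetime values (or None), like the objects used above.
def format_exec_times(execobj):
    if execobj is None or execobj.start_time is None:
        return ('N/A', 'N/A', 'N/A')
    starttime = execobj.start_time.strftime("%Y-%m-%d %H:%M:%S")
    if execobj.end_time is None:
        return (starttime, 'N/A', 'N/A')
    endtime = execobj.end_time.strftime("%Y-%m-%d %H:%M:%S")
    # subtracting two datetimes gives a timedelta, printed as H:MM:SS
    runtime = str(execobj.end_time - execobj.start_time)
    return (starttime, endtime, runtime)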
Exemplo n.º 21
0
def column_batch(p_engine, rulesetname, envname, inputfile):
    """
    Update all columns defined in file
    param1: p_engine: engine name from configuration
    param2: rulesetname: ruleset name
    param3: envname: environment name
    param4: inputfile: file object with column entries to update
    return 0 if all rows processed without issues
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        metalist = DxMetaList()

        ruleref = rulelist.get_rulesetId_by_name(rulesetname)
        if ruleref:
            ruleobj = rulelist.get_by_ref(ruleref)
        else:
            return 1

        metalist.LoadMeta(ruleobj.ruleset_id)

        metacolumn_list = {}
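        # metacolumn_list caches the loaded column list per metadata object,
        # so columns are fetched from the engine only once per table or file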

        for line in inputfile:
            if line.startswith('#'):
                continue
            try:
                logger.debug("readling line %s" % line)
                if ruleobj.type == "Database":
                    (metaname, column_role, parent_column, column_name, type,
                     domain_name, algname,
                     is_masked_YN) = line.strip().split(',')
                else:
                    (metaname, column_name, domain_name, algname,
                     is_masked_YN) = line.strip().split(',')

            except ValueError:
                logger.error("not all columns in file have value")
                print_error("not all columns in file have value")
                logger.error("line %s" % line)
                print_error("line %s" % line)
                ret = ret + 1
                continue

            metaref = metalist.get_MetadataId_by_name(metaname)
            if metaref is None:
                ret = ret + 1
                continue

            metaobj = metalist.get_by_ref(metaref)

            if metaref not in metacolumn_list:
                logger.debug("reading columns from engine for %s " % metaname)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaref)
                metacolumn_list[metaref] = collist

            colref = metacolumn_list[metaref].get_column_id_by_name(
                column_name)

            if colref:
                colobj = metacolumn_list[metaref].get_by_ref(colref)
                is_masked = (is_masked_YN == 'Y')

                if algname == '':
                    algname = 'None'

                if domain_name == '':
                    domain_name = 'None'

                update_algorithm(colobj=colobj,
                                 algname=algname,
                                 domainname=domain_name,
                                 metaobj=metaobj,
                                 ruleobj=ruleobj,
                                 is_masked=is_masked)
            else:
                ret = ret + 1
                continue

    return ret
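
# column_batch expects a comma separated file whose layout depends on the
# ruleset type read from the engine.  A minimal sketch of preparing such a file
# for a Database ruleset and feeding it to column_batch; the table, column,
# domain and algorithm values are illustrative only, and lines starting with
# '#' are skipped by the loop above.
columns_csv = [
    "# metaname,column_role,parent_column,column_name,type,domain_name,algname,is_masked_YN",
    "EMPLOYEES,,,FIRST_NAME,VARCHAR2,FIRST_NAME,FirstNameLookup,Y",
    "EMPLOYEES,,,LAST_NAME,VARCHAR2,LAST_NAME,LastNameLookup,Y",
]

with open("columns.csv", "w") as f:
    f.write("\n".join(columns_csv) + "\n")

# Possible call, assuming engine "myengine", ruleset "hr_ruleset" and
# environment "hr_env" are already defined:
# with open("columns.csv") as inputfile:
#     column_batch("myengine", "hr_ruleset", "hr_env", inputfile)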