Example #1
def do_start(**kwargs):
    """
    Start job
    """
    jobref = kwargs.get('jobref')
    joblist = kwargs.get('joblist')
    tgt_connector = kwargs.get('tgt_connector')
    tgt_connector_env = kwargs.get('tgt_connector_env')
    nowait = kwargs.get('nowait')
    posno = kwargs.get('posno')
    lock = kwargs.get('lock')
    monitor = kwargs.get('monitor')
    joblist_class = kwargs.get('joblist_class')

    jobobj = joblist.get_by_ref(jobref)

    targetconnector = None

    if jobobj.multi_tenant:
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        

        if tgt_connector is None:
            print_error("Target connector is required for multitenant job")
            lock.acquire()
            if joblist_class == "DxJobsList":
                dxm.lib.DxJobs.DxJobCounter.ret = \
                    dxm.lib.DxJobs.DxJobCounter.ret + 1
            else:
                dxm.lib.DxJobs.DxJobCounter.profileret = \
                    dxm.lib.DxJobs.DxJobCounter.profileret + 1
            lock.release()
            return 1

        connectorlist = DxConnectorsList(tgt_connector_env)
        #connectorlist.LoadConnectors()
        targetconnector = connectorlist.get_connectorId_by_name(
                            tgt_connector)
        if targetconnector:
            targetconnector = targetconnector[1:]
        else:
            print_error("Target connector for multitenant job not found")
            lock.acquire()
            if joblist_class == "DxJobsList":
                dxm.lib.DxJobs.DxJobCounter.ret = \
                    dxm.lib.DxJobs.DxJobCounter.ret + 1
            else:
                dxm.lib.DxJobs.DxJobCounter.profileret = \
                    dxm.lib.DxJobs.DxJobCounter.profileret + 1
            lock.release()
            return 1

    # starting job
    jobobj.monitor = monitor
    return jobobj.start(targetconnector, None, nowait, posno, lock)
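
The lock-guarded counter updates above follow a common pattern for aggregating failures across worker threads. A minimal, self-contained sketch of that pattern; the counter and lock below are stand-ins, not the real dxm.lib.DxJobs.DxJobCounter module:

import threading

failed_jobs = 0                  # stand-in for DxJobCounter.ret
counter_lock = threading.Lock()  # stand-in for the lock passed via kwargs

def record_failure():
    """Increment the shared failure counter while holding the lock."""
    global failed_jobs
    counter_lock.acquire()
    failed_jobs = failed_jobs + 1
    counter_lock.release()

workers = [threading.Thread(target=record_failure) for _ in range(4)]
for w in workers:
    w.start()
for w in workers:
    w.join()
print(failed_jobs)  # 4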
Example #2
def ruleset_add(p_engine, p_username, rulesetname, connectorname, envname):
    """
    Add ruleset to Masking engine
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: rulesetname: ruleset name
    param4: connectorname: connector name
    param5: envname: environment name
    return 0 if added, non 0 for error
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        connlist = DxConnectorsList(envname)
        #connlist.LoadConnectors()
        logger.debug("Connector is %s" % connectorname)
        connref = connlist.get_connectorId_by_name(connectorname)
        connobj = connlist.get_by_ref(connref)
        if connobj:
            if connobj.is_database:
                ruleset = DxDatabaseRuleset(engine_obj)
                ruleset.create_database_ruleset(
                    ruleset_name=rulesetname,
                    database_connector_id=connobj.connectorId,
                    refresh_drops_tables=None)
            else:
                ruleset = DxFileRuleset(engine_obj)
                ruleset.create_file_ruleset(
                    ruleset_name=rulesetname,
                    file_connector_id=connobj.connectorId)

            if rulelist.add(ruleset):
                ret = ret + 1
        else:
            ret = ret + 1

    return ret
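
A hypothetical invocation of ruleset_add with the signature shown above; it assumes the function is importable from the module it lives in, and every name below is a placeholder:

rc = ruleset_add(p_engine="engine1",
                 p_username="admin",
                 rulesetname="orders_ruleset",
                 connectorname="orders_db",
                 envname="PROD")
if rc != 0:
    print("ruleset_add reported %s error(s)" % rc)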
Example #3
    def addmeta(self, file_params):
        """
        Add file to ruleset
        :param file_params: set of file parameters
        return None if no error
        return 1 in case of error
        """

        filename = file_params["metaname"]
        file_format = file_params["file_format"]
        regular = file_params["file_name_regex"]
        delimiter = file_params["file_delimiter"]
        eor = file_params["file_eor"]
        enclosure = file_params["file_enclosure"]

        connlist = DxConnectorsList()
        #connlist.LoadConnectors(None)

        connobj = connlist.get_by_ref(self.connectorId)

        if filename is None:
            print_error("File name is required")
            self.__logger.error("File name is required")
            return 1

        if eor == 'custom':
            if file_params["file_eor_custom"]:
                eor_string = file_params["file_eor_custom"]
            else:
                print_error("Custom End of record is unknown")
                self.__logger.error("Custom End of record is unknown")
                return 1
        else:
            eor_string = eor

        if enclosure:
            enclosure = enclosure.strip()

        file = DxFile(self.__engine)
        file.create_file(file_name=filename,
                         ruleset_id=self.ruleset_id,
                         file_type=connobj.connector_type,
                         file_format_id=file_format,
                         delimiter=delimiter,
                         end_of_record=eor_string,
                         enclosure=enclosure,
                         name_is_regular_expression=regular)
        return file.add()
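
A sketch of the file_params dictionary that addmeta above expects; only the keys read by the method are shown and all values are placeholders:

file_params = {
    "metaname": "customers.csv",     # required, addmeta returns 1 without it
    "file_format": None,             # file format id, if any
    "file_name_regex": False,        # metaname is a literal name, not a regex
    "file_delimiter": ",",
    "file_eor": "custom",
    "file_eor_custom": "\n",         # required when file_eor == 'custom'
    "file_enclosure": '"',
}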
Example #4
def do_check(**kwargs):
    """
    Compare ruleset definition in engine with import file
    """

    ruleref = kwargs.get('ruleref')
    rulelist = kwargs.get('rulelist')
    envlist = DxEnvironmentList
    envname = kwargs.get('envname')
    ruleset = kwargs.get('ruleset')
    rulesetname = kwargs.get('rulesetname')
    p_engine = kwargs.get('p_engine')

    connname = ruleset["Connector name"]

    ruleobj = rulelist.get_by_ref(ruleref)
    connobj = DxConnectorsList.get_by_ref(ruleobj.connectorId)

    if connobj:
        envobj = envlist.get_by_ref(connobj.environment_id)
        connector_name = connobj.connector_name
        environment_name = envobj.environment_name
    else:
        connector_name = 'N/A'
        environment_name = 'N/A'

    retcol = 0

    metalist = DxMetaList()
    metalist.LoadMeta(ruleobj.ruleset_id)

    rettab = 0

    for meta in ruleset["Metadata"]:
        metalist_ref = metalist.get_MetadataId_by_name(meta["meta_name"], 1)
        if metalist_ref:
            rettab = rettab + 1
        else:
            print_error("Missing meta %s" % meta["meta_name"])

    for col in ruleset["Columns"]:
        count = [
            x for x in ruleset["Columns"]
            if col["Metadata name"] == x["Metadata name"]
        ]
        rc = column_check(p_engine, rulesetname, envname, col, len(count))
        if rc != 0:
            retcol = retcol + 1


    if (ruleobj.ruleset_name == rulesetname) and \
       (connector_name == connname) and \
       (environment_name == envname) and \
       (retcol == 0) and \
       (rettab == len(ruleset["Metadata"])):
        print_message("Ruleset definition in engine is matching import file")
        return 0
    else:
        print_error("There are difference between engine and import file")
        return 1
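
The list comprehension in do_check counts how many column entries share a metadata name; a self-contained illustration with placeholder data:

columns = [
    {"Metadata name": "EMPLOYEES", "Column name": "FIRST_NAME"},
    {"Metadata name": "EMPLOYEES", "Column name": "LAST_NAME"},
    {"Metadata name": "ORDERS",    "Column name": "SHIP_ADDRESS"},
]

for col in columns:
    # same comprehension as in do_check: all rows for this metadata name
    count = [x for x in columns if col["Metadata name"] == x["Metadata name"]]
    print(col["Column name"], len(count))
# FIRST_NAME 2
# LAST_NAME 2
# SHIP_ADDRESS 1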
Example #5
def ruleset_list_worker(**kwargs):
    """
    Print list of rulesets by ruleset name or environment name
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetName: name of ruleset to display
    param4: envname: name of environment to list rulesets from
    return 0 if environment found
    """

    p_engine = kwargs.get('p_engine')
    format = kwargs.get('format')
    rulesetName = kwargs.get('rulesetName')
    envname = kwargs.get('envname')
    function_to_call = kwargs.get('function_to_call')
    data = kwargs.get('data')

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        #envlist = DxEnvironmentList()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)

        if rulesetName is None:
            rulesets = rulelist.get_allref()
            if len(rulesets) == 0:
                ret = ret + 1
                continue
        else:
            rulesets = rulelist.get_all_rulesetId_by_name(rulesetName)
            if rulesets is None:
                ret = ret + 1
                continue

        # connlist = DxConnectorsList(envname)
        # connlist.LoadConnectors(envname)

        for ruleid in rulesets:
            ruleobj = rulelist.get_by_ref(ruleid)
            connobj = DxConnectorsList.get_by_ref(ruleobj.connectorId)

            dynfunc = globals()[function_to_call]
            if dynfunc(ruleobj=ruleobj, connobj=connobj,
                       # envlist=envlist,
                       engine_obj=engine_obj, **kwargs):
                ret = ret + 1
                continue

    return ret
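
ruleset_list_worker resolves the function to run by name via globals(); a minimal stand-alone sketch of that dispatch (ruleset_print below is an invented placeholder, not a real dxm function):

def ruleset_print(ruleobj=None, connobj=None, **kwargs):
    """Placeholder worker; prints the ruleset it was given."""
    print("processing", ruleobj)
    return 0

function_to_call = "ruleset_print"
dynfunc = globals()[function_to_call]   # look the callable up by name
rc = dynfunc(ruleobj="orders_ruleset", connobj=None)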
Example #6
def connector_selector(p_engine,
                       connectorname,
                       envname,
                       function_to_call,
                       format='fixed'):
    """
    Select unique connector from Masking engine and run function on it
    param1: p_engine: engine name from configuration
    param2: connectorname: connector name
    param3: envname: environment name
    param4: function_to_call: name of function to call on connector
    param5: format: output format, defaults to fixed
    return 0 on success, non 0 for error
    """

    ret = 0
    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        connref = connlist.get_connectorId_by_name(connectorname)

        if connref:
            dynfunc = globals()[function_to_call]
            ret = ret + dynfunc(connref=connref,
                                engine_obj=engine_obj,
                                connlist=connlist,
                                format=format)
        else:
            ret = ret + 1
            continue

    return ret
Example #7
    def addmetafromfetch(self, fetchfilter, bulk):
        """
        Add tables from fetch into ruleset
        :param fetchfilter: filter for tables
        :param bulk: if True, add all matching tables in a single bulk call
        return 0 if no error
        return 1 in case of error
        """

        connobj = DxConnectorsList.get_by_ref(self.connectorId)
        table_list = []
        ret = 0

        if fetchfilter:
            fetchfilter = re.escape(fetchfilter).replace(r"\*", ".*")
            self.logger.debug("fetchfilter {}".format(fetchfilter))
            pattern = re.compile(r'^{}$'.format(fetchfilter))

        for table in connobj.fetch_meta():
            params = {
                "metaname": table,
                "custom_sql": None,
                "where_clause": None,
                "having_clause": None,
                "key_column": None
            }

            self.logger.debug("checking table {}".format(table))

            if fetchfilter:
                if pattern.search(table):
                    self.logger.debug(
                        "filtered table added to bulk{}".format(table))
                    if bulk:
                        table_list.append(params)
                    else:
                        ret = ret + self.addmeta(params)
            else:
                if bulk:
                    table_list.append(params)
                else:
                    ret = ret + self.addmeta(params)

        # TODO:
        # add check of version or fail before trying

        if bulk:
            ret = self.addmeta_bulk(table_list)

        return ret
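
The fetchfilter handling above turns a shell-style '*' wildcard into an anchored regular expression; a stand-alone version of the same transformation with placeholder table names:

import re

fetchfilter = "EMP*"
# re.escape() turns '*' into '\*', which is then rewritten to the regex '.*'
pattern = re.compile(r'^{}$'.format(re.escape(fetchfilter).replace(r"\*", ".*")))

for table in ["EMPLOYEES", "EMP_HISTORY", "DEPARTMENTS"]:
    if pattern.search(table):
        print("matched", table)
# matched EMPLOYEES
# matched EMP_HISTORY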
Example #8
    def addmetafromfetch(self, fetchfilter, bulk):
        """
        Add files from fetch into ruleset
        :param fetchfilter: filter for files
        :param bulk: not supported for files; must be False
        return 0 if no error
        return 1 in case of error
        """

        connobj = DxConnectorsList.get_by_ref(self.connectorId)
        table_list = []
        ret = 0

        if bulk:
            print_error("Bulk option is not supported for files")
            return 1

        if fetchfilter:
            fetchfilter = re.escape(fetchfilter).replace(r"\*", ".*")
            self.logger.debug("fetchfilter {}".format(fetchfilter))
            pattern = re.compile(r'^{}$'.format(fetchfilter))

        for table in connobj.fetch_meta():
            params = {
                "metaname": table,
                "file_name_regex": None,
                "file_format": None,
                "file_delimiter": None,
                "file_eor": None,
                "file_enclosure": None
            }

            self.logger.debug("checking file {}".format(table))

            if fetchfilter:
                if pattern.search(table):
                    self.logger.debug(
                        "filtered file added to bulk{}".format(table))
                    ret = ret + self.addmeta(params)
            else:
                if bulk:
                    table_list.append(params)
                else:
                    ret = ret + self.addmeta(params)

        return ret
Example #9
def do_export(**kwargs):
    """
    Export ruleset into external object
    """

    ruleref = kwargs.get('ruleref')
    rulelist = kwargs.get('rulelist')
    exportout = kwargs.get('exportout')
    exportmeta = kwargs.get('exportmeta')
    metaname = kwargs.get('metaname')
    engine_obj = kwargs.get('engine_obj')
    envlist = DxEnvironmentList

    ruleobj = rulelist.get_by_ref(ruleref)
    connobj = DxConnectorsList.get_by_ref(ruleobj.connectorId)

    logger = logging.getLogger()

    ret = 0

    if connobj:
        envobj = envlist.get_by_ref(connobj.environment_id)
        connector_name = connobj.connector_name
        environment_name = envobj.environment_name
    else:
        connector_name = 'N/A'
        environment_name = None

    ruleset = {
        "Ruleset name": ruleobj.ruleset_name,
        "Connector name": connector_name,
        "Environent name": environment_name}

    if exportmeta == 'Y':
        metadatalist = []
        metalist = DxMetaList()
        metalist.LoadMeta(ruleobj.ruleset_id)

        if metaname:
            metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
            if metalist_ref is None:
                logger.error("no meta %s found" % metaname)
                return 1
        else:
            metalist_ref = metalist.get_allref()
            if metalist_ref is None:
                logger.error("no meta data found")
                return 1

        for metaid in metalist_ref:
            metaobj = metalist.get_by_ref(metaid)

            if connobj.is_database:
                tabhash = {
                  "table": True,
                  "meta_name": metaobj.meta_name,
                  "key_column": metaobj.key_column,
                  "where_clause": repr(metaobj.where_clause),
                  "custom_sql": repr(metaobj.custom_sql)
                }
            else:
                if metaobj.file_format_id is not None:
                    filetypelist = DxFileFormatList()
                    fileformatobj = filetypelist.get_by_ref(
                        metaobj.file_format_id)
                    fileformatname = fileformatobj.file_format_name
                else:
                    fileformatname = 'N/A'

                tabhash = {
                  "table": False,
                  "meta_name": metaobj.meta_name,
                  "file_format": fileformatname,
                  "file_delimiter": metaobj.delimiter,
                  "file_eor": metaobj.end_of_record,
                  "file_enclosure": metaobj.enclosure,
                  "file_name_regex": metaobj.name_is_regular_expression
                }

            metadatalist.append(tabhash)

        ruleset["Metadata"] = metadatalist
        columndata = column_export(
                        engine_obj.get_name(), None, ruleobj.ruleset_name,
                        environment_name, metaname, None, None)
        ruleset["Columns"] = json.loads(columndata.data_output(False))
    else:
        ruleset["Metadata"] = []
        ruleset["Columns"] = []

    exportout.append(ruleset)
    return ret
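
A sketch of the dictionary do_export appends to exportout; the keys mirror the ones built above (including the 'Environent name' spelling used by the code) and every value is a placeholder:

import json

ruleset = {
    "Ruleset name": "orders_ruleset",
    "Connector name": "orders_db",
    "Environent name": "PROD",          # key spelled exactly as in do_export
    "Metadata": [
        {"table": True, "meta_name": "ORDERS", "key_column": "ORDER_ID",
         "where_clause": "None", "custom_sql": "None"},
    ],
    "Columns": [],
}
print(json.dumps(ruleset, indent=4))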
Example #10
def do_update(**kwargs):
    """
    Update job or profile job parameters
    """
    jobref = kwargs.get('jobref')
    joblist = kwargs.get('joblist')
    params = kwargs.get('params')

    jobobj = joblist.get_by_ref(jobref)
    update = False

    logger = logging.getLogger()

    if "rulesetname" in params and params['rulesetname'] != None:
        rulesetname = params['rulesetname']
        # as job is in particular environment
        # new ruleset need to be search in same environment
        # job metadata doesn't return environment id so it has to be
        # found by linking old ruleset via connector id to environment
        rulesetlist = DxRulesetList()
        #rulesetlist.LoadRulesets(None)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(None)
        oldrulesetref = jobobj.ruleset_id
        logger.debug("old ruleset %s" % oldrulesetref)
        oldruleobj = rulesetlist.get_by_ref(oldrulesetref)
        oldconnobj = connlist.get_by_ref(oldruleobj.connectorId)
        rulesetlist.LoadRulesetsbyId(oldconnobj.environment_id)
        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)
        logger.debug("new ruleset %s" % rulesetref)
        if rulesetref != oldrulesetref:
            update = True
            jobobj.ruleset_id = rulesetref

    if type(jobobj) == dxm.lib.DxJobs.DxJob.DxJob:

        for p in optional_params_list:
            if params[p] is not None:
                update = True
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(jobobj.obj, p, value)

        dmo = jobobj.database_masking_options

        for p in optional_options_list:
            if params[p] is not None:
                update = True
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(dmo, p, value)

        if params["prescript"]=='':
            dmo.prescript = None
        elif params["prescript"]:
            scriptname = os.path.basename(params["prescript"].name)
            prescript = DxMaskingScriptJob(name=scriptname, contents=''.join(params["prescript"].readlines()))
            dmo.prescript = prescript

        if params["postscript"]=='':
            dmo.postscript = None
        if params["postscript"]:
            scriptname = os.path.basename(params["postscript"].name)
            postscript = DxMaskingScriptJob(name=scriptname, contents = ''.join(params["postscript"].readlines()))
            dmo.postscript = postscript
    else:

        if "profilename" in params and params['rulesetname'] != None:
            profilename = params['profilename']

            oldprofile = jobobj.profile_set_id
            logger.debug("old profile %s" % oldprofile)
            profilelist = DxProfilesList()
            profileref = profilelist.get_profileSetId_by_name(profilename)
            logger.debug("new profile %s" % profileref)
            if profileref != oldprofile:
                update = True
                jobobj.profile_set_id = profileref

        for p in masking_params_list:
            if params[p] is not None:
                update = True
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(jobobj.obj, p, value)

    if update:
        return jobobj.update()
    else:
        print_message('Nothing to update')
        return 1
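
do_update (and job_add below) repeat the same 'Y'/'N'-to-boolean conversion for every optional parameter; a small self-contained helper showing that mapping, with placeholder parameter names:

def to_value(raw):
    """Map 'Y'/'N' flags to booleans and pass any other value through."""
    if raw == 'Y':
        return True
    elif raw == 'N':
        return False
    return raw

params = {"commit_size": 10000, "drop_indexes": "Y", "batch_update": "N",
          "description": None}
converted = {k: to_value(v) for k, v in params.items() if v is not None}
print(converted)
# {'commit_size': 10000, 'drop_indexes': True, 'batch_update': False}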
Example #11
def job_add(p_engine, p_username, params):
    """
    Add masking job to Masking engine
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: params: job parameters
    return 0 if added, non 0 for error
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine, p_username)

    logger = logging.getLogger()

    envname = params['envname']
    jobname = params['jobname']
    rulesetname = params['rulesetname']

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        joblist = DxJobsList()
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s, job name is %s" % (envname, jobname))
        rulesetlist = DxRulesetList(envname)


        rulesetref = rulesetlist.get_rulesetId_by_name(rulesetname)

        job = DxJob(engine_obj, None)
        job.create_job(job_name=jobname, ruleset_id=rulesetref)

        for p in optional_params_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(job, p, value)


        dmo = DxDatabaseMaskingOptions()

        for p in optional_options_list:
            if params[p] is not None:
                if params[p] == 'Y':
                    value = True
                elif params[p] == 'N':
                    value = False
                else:
                    value = params[p]
                setattr(dmo, p, value)

        if params["on_the_fly_masking"] == 'Y' :
            src_env = params["on_the_fly_src_envname"]
            src_con = params["on_the_fly_src_connector"]
            conlist = DxConnectorsList(src_env)
            conid = conlist.get_connectorId_by_name(src_con)
            if not conid :
                return 1
            on_the_fly_maskking_srcobj = DxOnTheFlyJob()
            on_the_fly_maskking_srcobj.connector_id = conid[1:]

            conObj = conlist.get_by_ref(conid)
            if conObj.is_database :
                on_the_fly_maskking_srcobj.connector_type = "DATABASE"
            else:
                on_the_fly_maskking_srcobj.connector_type = "FILE"
            job.on_the_fly_masking_source = on_the_fly_maskking_srcobj


        if params["prescript"]:
            scriptname = os.path.basename(params["prescript"].name)
            prescript = DxMaskingScriptJob(name=scriptname, contents=''.join(params["prescript"].readlines()))
            dmo.prescript = prescript

        if params["postscript"]:
            scriptname = os.path.basename(params["postscript"].name)
            postscript = DxMaskingScriptJob(name=scriptname, contents = ''.join(params["postscript"].readlines()))
            dmo.postscript = postscript

        job.database_masking_options = dmo

        if joblist.add(job):
            ret = ret + 1

    return ret
Example #12
def connector_add(p_engine, params):
    """
    Add connector to Masking engine
    param1: p_engine: engine name from configuration
    param2: params: dict of parameters needed for connector to add
    return 0 if added, non 0 for error
    """

    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    envname = params['envname']
    schemaName = params['schemaName']
    host = params['host']
    port = params['port']
    password = params['password']
    username = params['username']
    connname = params['connname']

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s" % envname)
        envref = envlist.get_environmentId_by_name(envname)

        if envref is None:
            ret = ret + 1
            continue

        connlist = DxConnectorsList()
        if params['type'] in database_types:
            if params['type'] == 'oracle':
                connobj = OracleConnector(engine_obj)
            elif params['type'] == 'mssql':
                connobj = MSSQLConnector(engine_obj)
            elif params['type'] == 'sybase':
                connobj = SybaseConnector(engine_obj)
            else:
                connobj = DxConnector(engine_obj)
                connobj.database_type = params['type'].upper()

            connobj.connector_name = connname
            connobj.schema_name = schemaName
            connobj.username = username
            connobj.password = password
            connobj.host = host
            if port:
                connobj.port = port + 0
            connobj.sid = params['sid']
            connobj.jdbc = params['jdbc']
            connobj.environment_id = envref
            connobj.instance_name = params['instancename']
            connobj.database_name = params['databasename']

        elif params['type'] in file_types:
            path = params['path']
            connmode = params['servertype']
            connobj = DxFileConnector(engine_obj)
            connobj.is_database = False
            connobj.connector_name = connname
            connobj.environment_id = envref
            connobj.file_type = params['type'].upper()
            ci = ConnectionInfo()
            ci.host = host
            ci.port = port
            ci.login_name = username
            ci.password = password
            ci.path = path
            ci.connection_mode = connmode.upper()
            connobj.connection_info = ci
        else:
            print_error('Wrong connector type %s' % params['type'])
            logger.error('Wrong connector type %s' % params['type'])
            return 1

        if connlist.add(connobj):
            ret = ret + 1

    return ret
Example #13
def connector_list(p_engine, format, envname, connector_name, details):
    """
    Print list of connectors
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: envname: environment name filter for connectors
    param4: connector_name: connector name to list
    param5: details: print connector details
    return 0 if connector found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()

    if details:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Connector name", 30), ("Connector type", 15),
                       ("Hostname", 30), ("Port", 5), ("Schema name", 30),
                       ("Type depended", 100)]
    else:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Connector name", 30), ("Connector type", 15)]

    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        # connlist = DxConnectorsList()
        # envlist = DxEnvironmentList()
        # envlist.LoadEnvironments()
        # connlist.LoadConnectors(envname)

        DxConnectorsList(envname)

        if connector_name is None:
            connectors = DxConnectorsList.get_allref()
        else:
            connectors = DxConnectorsList.get_all_connectorId_by_name(
                connector_name)
            if connectors is None:
                ret = ret + 1
                continue

        for connref in connectors:
            connobj = DxConnectorsList.get_by_ref(connref)
            if details:
                rest = ''.join([
                    '%s = %s ' % (key, value)
                    for (key, value) in connobj.get_type_properties().items()
                ])
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name, connobj.connector_type,
                    connobj.host, connobj.port, connobj.schema_name, rest)
            else:
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name, connobj.connector_type)

    print("")
    print(data.data_output(False))
    print("")
    return ret
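
The (header, width) tuples passed to DataFormatter above drive fixed-width output; a simplified stand-in showing how such a row can be rendered (this is not the real DataFormatter class, and the row values are placeholders):

data_header = [("Engine name", 30), ("Environment name", 30),
               ("Connector name", 30), ("Connector type", 15)]
row = ("engine1", "PROD", "orders_db", "ORACLE")

print("".join(name.ljust(width) for name, width in data_header))
print("".join(str(value).ljust(width)
              for value, (_name, width) in zip(row, data_header)))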
Example #14
def connector_update(p_engine, params):
    """
    Update connector on Masking engine
    param1: p_engine: engine name from configuration
    param2: params: dict of parameters needed for connector update
    return 0 if updated, non 0 for error
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    connectorname = params['connname']
    envname = params['envname']

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        DxConnectorsList(envname)
        connref = DxConnectorsList.get_connectorId_by_name(connectorname)

        if connref is None:
            ret = ret + 1
            continue

        connobj = DxConnectorsList.get_by_ref(connref)

        if params['schemaName']:
            connobj.schema_name = params['schemaName']

        if params['jdbc']:
            connobj.host = None
            connobj.port = None
            if hasattr(connobj, 'sid'):
                connobj.sid = None
            if hasattr(connobj, 'instance_name'):
                connobj.instance_name = None
            if hasattr(connobj, 'database_name'):
                connobj.database_name = None
            connobj.jdbc = params['jdbc']
        else:
            connobj.jdbc = None
            if params['host']:
                connobj.host = params['host']

            if params['port']:
                connobj.port = params['port']
            if hasattr(connobj, 'sid'):
                if params['sid']:
                    connobj.sid = params['sid']

            if hasattr(connobj, 'instance_name'):
                if params['instancename']:
                    connobj.instance_name = params['instancename']

            if hasattr(connobj, 'database_name'):
                if params['databasename']:
                    connobj.database_name = params['databasename']

        if params['password']:
            connobj.password = params['password']

        if params['username']:
            connobj.username = params['username']

        if params['connname']:
            connobj.connector_name = params['connname']

        if connobj.update():
            ret = ret + 1

    return ret
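
connector_update clears host/port-style attributes only when they exist on the connector object; a minimal sketch of those hasattr() guards with a dummy class and placeholder values:

class DummyConnector:
    """Stand-in connector with only a subset of the possible attributes."""
    def __init__(self):
        self.host = "db1.example.com"
        self.port = 1521
        self.sid = "ORCL"

conn = DummyConnector()
params = {"jdbc": "jdbc:oracle:thin:@db2.example.com:1521/ORCL"}

if params["jdbc"]:
    conn.host = None
    conn.port = None
    if hasattr(conn, 'sid'):
        conn.sid = None
    if hasattr(conn, 'instance_name'):     # absent here, so skipped
        conn.instance_name = None
    conn.jdbc = params["jdbc"]

print(conn.__dict__)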
Example #15
    def LoadRulesets_worker(self, environment_name, env_id):
        """
        Load list of rule sets
        Return None if OK
        """



        self.__rulesetList.clear()
        DxConnectorsList(environment_name)

        self.__api = DatabaseRulesetApi
        self.__fileapi = FileRulesetApi
        self.__apiexc = ApiException

        try:
            api_instance = self.__api(self.__engine.api_client)

            if environment_name:
                environment_id = DxEnvironmentList.get_environmentId_by_name(
                                 environment_name)

                if environment_id:
                    database_rulesets = paginator(
                            api_instance,
                            "get_all_database_rulesets",
                            environment_id=environment_id,
                            _request_timeout=self.__engine.get_timeout())
                else:
                    return 1

            else:
                if env_id:
                    environment_id = env_id
                    database_rulesets = paginator(
                            api_instance,
                            "get_all_database_rulesets",
                            environment_id=environment_id,
                            _request_timeout=self.__engine.get_timeout())
                else:
                    environment_id = None
                    database_rulesets = paginator(
                                            api_instance,
                                            "get_all_database_rulesets")

            if database_rulesets.response_list:
                for c in database_rulesets.response_list:
                    ruleset = DxDatabaseRuleset(self.__engine)
                    ruleset.from_ruleset(c)
                    self.__rulesetList[c.database_ruleset_id] = ruleset
            else:
                if environment_id:
                    self.__logger.error("No database ruleset found for "
                                        "environment name %s"
                                        % environment_name)
                else:
                    self.__logger.error("No database ruleset found")


            api_instance = self.__fileapi(self.__engine.api_client)

            if environment_id:
                file_rulesets = paginator(
                        api_instance,
                        "get_all_file_rulesets",
                        environment_id=environment_id)
            else:
                file_rulesets = paginator(
                        api_instance,
                        "get_all_file_rulesets")

            if file_rulesets.response_list:
                for c in file_rulesets.response_list:
                    ruleset = DxFileRuleset(self.__engine)
                    ruleset.from_ruleset(c)
                    self.__rulesetList[c.file_ruleset_id] = ruleset
            else:
                if environment_id:
                    self.__logger.error("No file ruleset found for "
                                        "environment name %s"
                                        % environment_name)
                else:
                    self.__logger.error("No file ruleset found")

        except self.__apiexc as e:
            print_error("Can't load ruleset %s" % e.body)
            return 1
Example #16
    def LoadRulesets_worker(self, environment_name, env_id):
        """
        Load list of rule sets
        Return None if OK
        """

        if self.__loaded_engine is None:
            self.__loaded_engine = self.__engine.get_name()

        #if self.__loaded_engine == self.__engine.get_name() and self.__rulesetList != {}:
        #   return None
        #else:
        # delete a list as we can have multi engines
        self.__rulesetList.clear()
        self.__loaded_engine = self.__engine.get_name()

        DxConnectorsList(environment_name)

        if (self.__engine.version_ge('6.0.0')):
            from masking_api_60.api.database_ruleset_api import DatabaseRulesetApi
            from masking_api_60.api.file_ruleset_api import FileRulesetApi
            from masking_api_60.rest import ApiException
        else:
            from masking_api_53.api.database_ruleset_api import DatabaseRulesetApi
            from masking_api_53.api.file_ruleset_api import FileRulesetApi
            from masking_api_53.rest import ApiException

        self.__api = DatabaseRulesetApi
        self.__fileapi = FileRulesetApi
        self.__apiexc = ApiException

        try:
            api_instance = self.__api(self.__engine.api_client)

            if environment_name:
                environment_id = DxEnvironmentList.get_environmentId_by_name(
                    environment_name)

                if environment_id:
                    database_rulesets = paginator(
                        api_instance,
                        "get_all_database_rulesets",
                        environment_id=environment_id,
                        _request_timeout=self.__engine.get_timeout())
                else:
                    return 1

            else:
                if env_id:
                    environment_id = env_id
                    database_rulesets = paginator(
                        api_instance,
                        "get_all_database_rulesets",
                        environment_id=environment_id,
                        _request_timeout=self.__engine.get_timeout())
                else:
                    environment_id = None
                    database_rulesets = paginator(api_instance,
                                                  "get_all_database_rulesets")

            if database_rulesets.response_list:
                for c in database_rulesets.response_list:
                    ruleset = DxDatabaseRuleset(self.__engine)
                    ruleset.from_ruleset(c)
                    self.__rulesetList[c.database_ruleset_id] = ruleset
            else:
                if environment_id:
                    self.__logger.error("No database ruleset found for "
                                        "environment name %s" %
                                        environment_name)
                else:
                    self.__logger.error("No database ruleset found")

            api_instance = self.__fileapi(self.__engine.api_client)

            if environment_id:
                file_rulesets = paginator(api_instance,
                                          "get_all_file_rulesets",
                                          environment_id=environment_id)
            else:
                file_rulesets = paginator(api_instance,
                                          "get_all_file_rulesets")

            if file_rulesets.response_list:
                for c in file_rulesets.response_list:
                    ruleset = DxFileRuleset(self.__engine)
                    ruleset.from_ruleset(c)
                    self.__rulesetList[c.file_ruleset_id] = ruleset
            else:
                if environment_id:
                    self.__logger.error("No file ruleset found for "
                                        "environment name %s" %
                                        environment_name)
                else:
                    self.__logger.error("No file ruleset found")

        except self.__apiexc as e:
            print_error("Can't load ruleset %s" % e.body)
            return 1
Example #17
def jobs_list_worker(p_engine, p_username, jobname, envname, p_format,
                     joblist_class):
    """
    Print list of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: jobname: job name to list
    param4: envname: environment name to list jobs from
    param5: p_format: output format
    param6: joblist_class - DxJobsList, DxProfileJobslist
    return 0 if environment found
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Job name", 30),
                    ("Ruleset name", 30),
                    ("Connector name", 30),
                    ("Environment name", 30),
                    ("Completed", 20),
                    ("Status", 20),
                    ("Runtime", 20)
                  ]
    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        joblist = globals()[joblist_class]()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        #rulesetlist.LoadRulesets(envname)
        #connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)

            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # those tests are required for 5.X engines where the API
            # is not showing all types of connectors
            if rulesetobj is not None:
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                         envobjname = "N/A"   
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"

            if jobobj.lastExec is not None:
                status = jobobj.lastExec.status
                if (jobobj.lastExec.end_time is not None) and \
                   (jobobj.lastExec.start_time is not None):
                    endtime = jobobj.lastExec.end_time \
                        .strftime("%Y-%m-%d %H:%M:%S")
                    runtimetemp = jobobj.lastExec.end_time \
                        - jobobj.lastExec.start_time
                    runtime = str(runtimetemp)
                else:
                    endtime = 'N/A'
                    runtime = 'N/A'
            else:
                status = 'N/A'
                endtime = 'N/A'
                runtime = 'N/A'

            data.data_insert(
                              engine_tuple[0],
                              jobobj.job_name,
                              rulename,
                              connectorname,
                              envobjname,
                              endtime,
                              status,
                              runtime
                            )
        print("")
        print (data.data_output(False))
        print("")
        return ret
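
The runtime column above is just the difference of two datetimes rendered with str(); a self-contained illustration with placeholder timestamps:

from datetime import datetime

start_time = datetime(2023, 5, 1, 10, 0, 0)
end_time = datetime(2023, 5, 1, 11, 23, 45)

endtime = end_time.strftime("%Y-%m-%d %H:%M:%S")
runtime = str(end_time - start_time)      # timedelta renders as H:MM:SS

print(endtime, runtime)   # 2023-05-01 11:23:45 1:23:45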
Example #18
def ruleset_listmeta(p_engine, format, rulesetname, envname, metaname):
    """
    List tables/files from ruleset
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    return 0 if found, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Environent name", 30),
                    ("Ruleset name", 30),
                    ("Metadata type", 15),
                    ("Metadata name", 32)
                  ]
    data.create_header(data_header)
    data.format_type = format
    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_list = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            rulesetref_list = rulelist.get_allref()
            if rulesetref_list is None:
                continue

        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                data.data_insert(
                                  engine_tuple[0],
                                  environment_name,
                                  ruleobj.ruleset_name,
                                  ruleobj.type,
                                  metaobj.meta_name
                                )

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table or file %s not found" % metaname)
        return ret
Example #19
def tab_list_details(p_engine, p_format, rulesetname, envname, metaname, what):
    """
    List details of tables/files from ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    param6: what - Database/File
    return 0 if found, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()

    if what == 'Database':
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("Table name", 32),
                       ("Logical key", 32), ("Where clause", 50),
                       ("Custom SQL", 50)]
    else:
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("File name", 32),
                       ("File type", 32), ("File format name", 32),
                       ("Delimiter", 10), ("End of record", 10)]

    data.create_header(data_header)

    data.format_type = p_format

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_all = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_all is None:
                ret = ret + 1
                continue
            rulesetref_list = [
                x for x in rulesetref_all
                if rulelist.get_by_ref(x).type == what
            ]
            if not rulesetref_list:
                ret = ret + 1
                continue
        else:
            if what == 'Database':
                rulesetref_list = rulelist.get_all_database_rulesetIds()
                if rulesetref_list is None:
                    continue
            else:
                rulesetref_list = rulelist.get_all_file_rulesetIds()
                if rulesetref_list is None:
                    continue

        filetypelist = DxFileFormatList()
        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                if what == 'Database':
                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.key_column,
                                     repr(metaobj.where_clause),
                                     repr(metaobj.custom_sql))
                else:
                    if metaobj.file_format_id is not None:
                        fileformatobj = filetypelist.get_by_ref(
                            metaobj.file_format_id)
                        fileformatname = fileformatobj.file_format_name
                    else:
                        fileformatname = 'N/A'

                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.file_type, fileformatname,
                                     metaobj.delimiter,
                                     repr(metaobj.end_of_record))

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table %s not found" % metaname)
        return ret
Example #20
def sync_worker(p_engine, objecttype, objectname, envname,
                function_to_call, **kwargs):
    """
    Run an action for list of syncable objects
    param1: p_engine: engine name from configuration
    param2: objecttype: object type to list, all if None
    param3: objectname: object name to process
    param4: envname: environment name filter
    param5: function_to_call: function to run for each object
    return 0 if objecttype found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    # objectname = "RandomValueLookup"
    # objectname = None
    ret = 0

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        synclist = DxSyncList(objecttype)

        if (objecttype is None or objecttype == "algorithm") \
           and envname is None:
            if objectname:
                alglist = [objectname]
            else:
                alglist = synclist.get_all_algorithms()

            for syncref in alglist:
                syncobj = synclist.get_object_by_type_name(
                                        "algorithm", syncref)
                if syncobj:

                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname='global',
                        name=syncref, **kwargs)

        if objecttype is None or objecttype == "database_connector" \
           or objecttype == "file_connector":

            envlist = DxEnvironmentList()
            connlist = DxConnectorsList(envname)

            if objecttype is None:
                objtypelist = ["database_connector", "file_connector"]
            else:
                objtypelist = [objecttype]

            for objtype in objtypelist:

                if objectname:
                    connbynameref = connlist.get_connectorId_by_name(
                                        objectname, False)
                    if connbynameref:
                        syncconnref = int(connbynameref[1:])
                        if synclist.get_object_by_type_name(
                                                objtype,
                                                syncconnref):
                            connrefs = [syncconnref]
                        else:
                            connrefs = []
                    else:
                        connrefs = []
                else:
                    connrefs = synclist.get_all_object_by_type(objtype)

                for syncref in connrefs:
                    syncobj = synclist.get_object_by_type_name(
                                        objtype, syncref)
                    if syncobj.object_type == 'DATABASE_CONNECTOR':
                        connobj = connlist.get_by_ref("d" + str(syncref))
                    else:
                        connobj = connlist.get_by_ref("f" + str(syncref))

                    if connobj is None:
                        # limited by env
                        continue
                    envobj = envlist.get_by_ref(connobj.environment_id)

                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname=envobj.environment_name,
                        name=connobj.connector_name,
                        **kwargs)

        if objecttype is None or objecttype == "database_ruleset" \
           or objecttype == "file_ruleset":

            envlist = DxEnvironmentList()
            connlist = DxConnectorsList(envname)
            rulesetList = DxRulesetList(envname)

            if objecttype is None:
                objtypelist = ["database_ruleset", "file_ruleset"]
            else:
                objtypelist = [objecttype]

            for objtype in objtypelist:

                if objectname:
                    rulesetrefs = []
                    rulesetref = rulesetList.get_all_rulesetId_by_name(
                                                objectname)
                    if rulesetref:
                        for rsref in rulesetref:
                            if synclist.get_object_by_type_name(
                                                objtype, rsref):
                                rulesetrefs.append(rsref)
                            else:
                                rulesetrefs = []
                    else:
                        rulesetrefs = []
                else:
                    rulesetrefs = synclist.get_all_object_by_type(objtype)

                for syncref in rulesetrefs:
                    syncobj = synclist.get_object_by_type_name(objtype,
                                                               syncref)
                    rulesetobj = rulesetList.get_by_ref(syncref)
                    if rulesetobj is None:
                        # limited by env
                        continue
                    connobj = connlist.get_by_ref(rulesetobj.connectorId)
                    envobj = envlist.get_by_ref(connobj.environment_id)
                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(
                        object=syncobj,
                        engine_obj=engine_obj,
                        envname=envobj.environment_name,
                        name=rulesetobj.ruleset_name,
                        **kwargs)

        if (objecttype is None or objecttype == "global_object"
           or objecttype == "key" or objecttype == "domain") \
           and envname is None:

            if objecttype is None:
                objtypelist = ["global_object", "key", "domain"]
            else:
                objtypelist = [objecttype]

            for objtype in objtypelist:
                if objectname:
                    objlist = [objectname]
                else:
                    objlist = synclist.get_all_object_by_type(objtype)
                for syncref in objlist:
                    syncobj = synclist.get_object_by_type_name(objtype,
                                                               syncref)
                    if syncobj:

                        dynfunc = globals()[function_to_call]
                        ret = ret + dynfunc(
                            object=syncobj,
                            engine_obj=engine_obj,
                            envname='global',
                            name=syncref, **kwargs)

        if objecttype is None or objecttype == "masking_job":

            envlist = DxEnvironmentList()
            joblist = DxJobsList()
            joblist.LoadJobs(envname)
            connlist = DxConnectorsList(envname)
            rulesetlist = DxRulesetList(envname)

            if objectname:
                jobref = joblist.get_jobId_by_name(objectname)
                if synclist.get_object_by_type_name("masking_job", jobref):
                    jobrefs = [jobref]
                else:
                    jobrefs = []
            else:
                jobrefs = synclist.get_all_object_by_type("masking_job")

            for syncref in jobrefs:
                syncobj = synclist.get_object_by_type_name("masking_job",
                                                           syncref)
                jobobj = joblist.get_by_ref(syncref)
                if envname and jobobj is None:
                    # limited by env
                    continue
                rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
                connectorobj = connlist.get_by_ref(rulesetobj.connectorId)
                envobj = envlist.get_by_ref(connectorobj.environment_id)
                dynfunc = globals()[function_to_call]
                ret = ret + dynfunc(
                    object=syncobj,
                    engine_obj=engine_obj,
                    envname=envobj.environment_name,
                    name=jobobj.job_name,
                    **kwargs)

    return ret
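The worker above resolves the action routine at runtime through globals()[function_to_call], so any module-level function that accepts the expected keyword arguments can be plugged in. A minimal sketch of such a callback, assuming a hypothetical name do_object_print_hypothetical that is not part of the toolkit:

def do_object_print_hypothetical(**kwargs):
    # the worker supplies object, engine_obj, envname and name;
    # extra keyword arguments are passed through untouched
    envname = kwargs.get('envname')   # environment name or 'global'
    name = kwargs.get('name')         # connector / ruleset / job name
    print("processing %s from environment %s" % (name, envname))
    return 0                          # the worker sums the return codes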
Exemple #21
0
def column_worker(p_engine,
                  sortby,
                  rulesetname,
                  envname,
                  metaname,
                  columnname,
                  filter_algname,
                  filter_is_masked,
                  algname,
                  is_masked,
                  domainname,
                  function_to_call,
                  data=None,
                  inventory=None,
                  **kwargs):
    """
    Select a column using all filter parameters
    and run action defined in function_to_call

    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: filter_algname: algorithm name to filter
    param8: filter_is_masked: is masked filter
    param9: algname: new algorithm to set
    param10: is_masked: set masking False/True
    param11: domainname: new domain to set
    param12: function_to_call: function name to call
    param13: data: output object
    return 0 if action is processed without issues, non 0 for error
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        connlist = DxConnectorsList(envname)
        metalist = DxMetaList()

        rulesetref_list = []

        if rulesetname:
            ruleref = rulelist.get_rulesetId_by_name(rulesetname)
            if ruleref:
                rulesetref_list.append(ruleref)
        else:
            rulesetref_list = rulelist.get_allref()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)

            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
            else:
                envobj = None

            metalist.LoadMeta(ruleobj.ruleset_id)

            metasetref_list = []

            if metaname:
                metaref = metalist.get_MetadataId_by_name(metaname, 1)
                if metaref:
                    metasetref_list.append(metaref)
            else:
                metasetref_list = metalist.get_allref()

            for metaid in metasetref_list:
                metaobj = metalist.get_by_ref(metaid)
                collist = DxColumnList()
                collist.LoadColumns(metadata_id=metaid,
                                    is_masked=filter_is_masked)

                colsetref_list = []

                colcount = kwargs.get("colcount")
                if colcount is not None:
                    colcount.extend(collist.get_allref())

                if columnname:
                    colref = collist.get_column_id_by_name(columnname)
                    logger.debug("Column ref with name %s : %s" %
                                 (columnname, colref))
                    if colref:
                        colsetref_list.append(colref)
                else:
                    colsetref_list = collist.get_allref()

                logger.debug("List of columns to process : %s" %
                             colsetref_list)

                if filter_algname:
                    colsetref_masked = collist.get_column_id_by_algorithm(
                        filter_algname)
                    logger.debug("List of columns with algorithm %s : %s" %
                                 (filter_algname, colsetref_masked))
                    colsetref_list = list(
                        set(colsetref_list)
                        & set(colsetref_masked))
                    logger.debug("Intersection with column name filter %s" %
                                 colsetref_masked)

                for colref in colsetref_list:
                    colobj = collist.get_by_ref(colref)

                    dynfunc = globals()[function_to_call]
                    ret = ret + dynfunc(data=data,
                                        engine=engine_tuple,
                                        envobj=envobj,
                                        ruleobj=ruleobj,
                                        metaobj=metaobj,
                                        colobj=colobj,
                                        algname=algname,
                                        is_masked=is_masked,
                                        domainname=domainname,
                                        inventory=inventory,
                                        **kwargs)
    return ret
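column_worker uses the same convention: the routine named in function_to_call is called once per matching column with the resolved environment, ruleset, meta and column objects. A hedged usage sketch; the callback name, engine name and ruleset name below are placeholders:

def do_column_print_hypothetical(**kwargs):
    # ruleobj, metaobj and colobj are resolved by column_worker;
    # algname, is_masked and domainname are passed through unchanged
    ruleobj = kwargs.get('ruleobj')
    colobj = kwargs.get('colobj')
    print("ruleset %s : column %s" % (ruleobj.ruleset_name, colobj))
    return 0

# example call (placeholders): process every column of ruleset "myruleset"
# column_worker("myengine", None, "myruleset", None, None, None,
#               None, None, None, None, None, "do_column_print_hypothetical")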
Exemple #22
0
def jobs_report_worker(p_engine, p_username, jobname, envname, p_format, last, startdate, enddate, details, jobtype='masking'):
    """
    Print report of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: username used to connect to the engine
    param3: jobname: job name to list
    param4: envname: environment name to list jobs from
    param5: p_format: output format
    param6: last: display last execution only
    param7: startdate: filter executions by start date
    param8: enddate: filter executions by end date
    param9: details: display per-component execution details
    param10: jobtype: job type - masking or profile
    return 0 if jobs and executions were reported, non 0 for error
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()

    if jobtype == 'masking':
    
        if details:
            data_header = [
                    ("Engine name", 30),
                    ("Environment name", 30),
                    ("Job name", 30),  
                    ("ExecId", 6),               
                    ("Meta name", 12),
                    ("Masked Rows", 10),   
                    ("Started", 20),                                                              
                    ("Completed", 20),
                    ("Status", 20),
                    ("Runtime", 20)                  
                    ]
        else:
            data_header = [
                            ("Engine name", 30),
                            ("Environment name", 30),
                            ("Job name", 30),
                            ("Job Id", 6),                    
                            ("Min Memory", 10),
                            ("Max Memory", 10), 
                            ("Streams", 7),                                              
                            ("On The Fly", 10),
                            ("Ruleset Type", 12),
                            ("ExecId", 6),
                            ("Total Rows", 10),
                            ("Masked Rows", 10),   
                            ("Started", 20),                                                              
                            ("Completed", 20),
                            ("Status", 20),
                            ("Runtime", 20)                  
                        ]

    else:
        data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),  
                ("Ruleset Type", 12),
                ("ExecId", 6),                 
                ("Started", 20),                                                              
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)                  
                ]


    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        if jobtype == 'masking':
            joblist = DxJobsList()
        else:
            joblist = DxProfileJobsList()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        #rulesetlist.LoadRulesets(envname)
        #connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)

            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API does not show all types of connectors
            if rulesetobj is not None:
                ruleset_type = rulesetobj.type
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"   
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"
                ruleset_type = "N/A"


            if last:
                lastonly = True
            else:
                lastonly = False


            if lastonly:
                execlist = [ jobobj.lastExec ]
            else:
                if startdate or enddate:
                    execlist = jobobj.filter_executions(startdate, enddate)
                else:
                    execlist = jobobj.execList

            if execlist:

                for jobexec in execlist:



                    if details == False:

                        if jobtype == 'masking':

                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                rowsmasked = jobexec.rows_masked
                                rowstotal = jobexec.rows_total
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'
                                rowsmasked = 'N/A'
                                rowstotal = 'N/A'

                            data.data_insert(
                                            engine_tuple[0],
                                            envobjname,
                                            jobobj.job_name,
                                            jobobj.masking_job_id,
                                            jobobj.min_memory,
                                            jobobj.max_memory,
                                            jobobj.num_input_streams,
                                            jobobj.on_the_fly_masking,
                                            ruleset_type,
                                            execid,
                                            rowstotal,
                                            rowsmasked,
                                            starttime,
                                            endtime,
                                            status,
                                            runtime
                                            )
                        else:

                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'


                            data.data_insert(
                                            engine_tuple[0],
                                            envobjname,
                                            jobobj.job_name,
                                            ruleset_type,
                                            execid,
                                            starttime,
                                            endtime,
                                            status,
                                            runtime
                                            )

                    else:
                        # details here      
                        if jobexec is not None:         
                            execid = jobexec.execution_id
                            complist = jobobj.list_execution_component(execid)

                            if complist is not None:
                                for comp in complist:    
                                    status = comp.status
                                    rowsmasked = comp.rows_masked
                                    metaname = comp.component_name
                                    if comp.start_time is not None:
                                        starttime = comp.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                    else:
                                        starttime = 'N/A'
                                    if (comp.end_time is not None) and \
                                    (comp.start_time is not None):
                                        endtime = comp.end_time \
                                            .strftime("%Y-%m-%d %H:%M:%S")
                                        runtimetemp = comp.end_time \
                                            - comp.start_time
                                        runtime = str(runtimetemp)
                                    else:
                                        endtime = 'N/A'
                                        runtime = 'N/A'

                                    data.data_insert(
                                                    engine_tuple[0],
                                                    envobjname,
                                                    jobobj.job_name,
                                                    execid,
                                                    metaname,
                                                    rowsmasked,
                                                    starttime,
                                                    endtime,
                                                    status,
                                                    runtime
                                                    )
                        else:
                            print("setting 1")
                            ret = 1


                    
                else:
                    # no executions
                    ret = 1

            else:
                # no executions
                ret = 1


        print("")
        print (data.data_output(False))
        print("")
        return ret
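A hedged invocation sketch for the report worker; the engine name and username are placeholders, and p_format is left as None here although in practice it would carry one of the formats accepted by DataFormatter:

# hypothetical call: report the last execution of every masking job on
# engine "myengine" across all environments, without per-component details
rc = jobs_report_worker("myengine", "admin", None, None, None,
                        True, None, None, False, jobtype='masking')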
Exemple #23
0
def connector_add(p_engine, p_username, params):
    """
    Add connector to Masking engine
    param1: p_engine: engine name from configuration
    param2: p_username: username used to connect to the engine
    param3: params: dict of parameters needed for connector to add
    return 0 if added, non 0 for error
    """

    ret = 0
    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    envname = params['envname']
    schemaName = params['schemaName']
    host = params['host']
    port = params['port']
    password = params['password']
    username = params['username']
    connname = params['connname']

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        logger.debug("Envname is %s" % envname)
        envref = envlist.get_environmentId_by_name(envname)

        if envref is None:
            ret = ret + 1
            continue

        connlist = DxConnectorsList()
        if params['type'] in database_types:
            if params['type'] == 'oracle':
                connobj = OracleConnector(engine_obj)
                dbtype = 'ORACLE'
            elif params['type'] == 'mssql':
                connobj = MSSQLConnector(engine_obj)
                dbtype = 'MSSQL'
            elif params['type'] == 'sybase':
                connobj = SybaseConnector(engine_obj)
                dbtype = 'SYBASE'
            elif params['type'] == 'extended':
                connobj = ExtendedConnector(engine_obj)
                dbtype = 'EXTENDED'
            else:
                connobj = DxConnector(engine_obj)
                dbtype = params['type'].upper()

            connobj.create_connector(
                connector_name = connname,
                database_type = dbtype,
                environment_id = envref
            )


            connobj.schema_name = schemaName
            connobj.username = username
            connobj.password = password
            connobj.host = host

            if port:
                connobj.port = int(port)
            connobj.sid = params['sid']
            connobj.jdbc = params['jdbc']
            connobj.instance_name = params['instancename']
            connobj.database_name = params['databasename']

            if params['jdbc_driver_name']:
                jdbclist = DxJDBCList()
                driver_id = jdbclist.get_driver_id_by_name(params['jdbc_driver_name'])
                connobj.jdbc_driver_id = driver_id

        elif params['type'] in file_types:
            path = params['path']
            connmode = params['servertype']
            connobj = DxFileConnector(engine_obj)
            connobj.is_database = False
            connobj.create_connector(
                connector_name = connname,
                file_type = params['type'].upper(),
                environment_id = envref,
                host=host,
                port=port,
                login_name=username,
                password=password,
                path=path,
                connection_mode=connmode.upper()
            )

        else:
            print_error('Wrong connector type %s' % params['type'])
            logger.error('Wrong connector type %s' % params['type'])
            return 1

        if connlist.add(connobj):
            ret = ret + 1

    return ret
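connector_add expects a flat params dict carrying every key the branch for the chosen connector type reads. A hedged example for an Oracle connector; all values are placeholders:

# hypothetical parameter dict for a database connector of type 'oracle';
# keys the oracle branch reads but does not need here are set to None
oracle_params = {
    'type': 'oracle',
    'connname': 'ORA_CONN1',
    'envname': 'MASK_ENV',
    'schemaName': 'SCOTT',
    'host': 'dbhost.example.com',
    'port': '1521',
    'username': 'scott',
    'password': 'secret',
    'sid': 'ORCL',
    'jdbc': None,
    'instancename': None,
    'databasename': None,
    'jdbc_driver_name': None
}

# connector_add("myengine", "admin", oracle_params)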