Example #1
def ruleset_list(p_engine, format, rulesetName, envname):
    """
    Print list of ruleset by ruleset name or environment name
    param1: p_engine: engine name from configuration
    param2: format: output format
    param2: ruleset: name of ruleset to display
    param3: envname: name of environment to list ruleset
    return 0 if environment found
    """
    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Ruleset name", 30),
                    ("Connector name", 30),
                    ("Metadata type", 15),
                    ("Connector type", 15),
                    ("Environent name", 30)
                  ]
    data.create_header(data_header)
    data.format_type = format

    ret = ruleset_list_worker(
            p_engine=p_engine,
            format=format,
            rulesetName=rulesetName,
            envname=envname,
            function_to_call="do_list",
            data=data)

    print("")
    print(data.data_output(False))
    print("")
    return ret
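All of these listing functions share the same DataFormatter pattern: build a header from (column name, width) tuples, set the output format, insert one row per object, and print the rendered table. Below is a minimal sketch of that pattern using only the calls visible in the examples; DataFormatter is assumed to be imported as in the functions above, and the column names, format value and row values are illustrative.

# Minimal sketch of the DataFormatter pattern used by the listing functions.
# Column names, the format value and the inserted row are illustrative.
data = DataFormatter()
data.create_header([("Engine name", 30), ("Object name", 32)])
data.format_type = "csv"                    # or whatever format the caller passed
data.data_insert("engine1", "some_object")  # one call per output row
print("")
print(data.data_output(False))
print("")
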
def profile_list(p_engine, profilename, expname, p_format, mapping):
    """
    Print list of Profile sets
    param1: p_engine: engine name from configuration
    param2: profilename: profile name to list
    param3: expname: expression name to list if details is true
    param4: p_format: output format
    param5: mapping: print expressions mapping for each profile
    return 0 if profile name found
    """
    data = DataFormatter()
    if not mapping:
        data_header = [("Engine name", 30), ("Profile name", 30),
                       ("Number of expressions", 30), ("Created by", 30),
                       ("Created time", 30), ("Description", 30)]
    else:
        data_header = [("Engine name", 30), ("Profile name", 30),
                       ("Expression name", 30), ("Domain name", 30),
                       ("Created by", 30), ("Column/Data level", 30),
                       ("Regular expression", 30)]

    data.create_header(data_header)
    data.format_type = p_format
    ret = profile_worker(p_engine=p_engine,
                         profilename=profilename,
                         expname=expname,
                         function_to_call='do_profilelist',
                         p_format=p_format,
                         mapping=mapping,
                         data=data)

    print("")
    print(data.data_output(False))
    print("")
    return ret
Example #3
def engine_list(p_engine, p_username, p_format):
    """
    List Masking engines from configuration file
    param1: p_engine: name of Masking engine
    param2: p_username: username
    param3: p_format: output format
    return None if OK or integer with error, ex. no rows found
    """
    data = DataFormatter()
    data_header = [("Engine name", 30), ("IP", 30), ("username", 30),
                   ("protocol", 8), ("port", 5), ("default", 7),
                   ("proxy URL", 30), ("proxy user", 30)]
    data.create_header(data_header)
    data.format_type = p_format

    config = DxConfig()
    config.init_metadata()
    if p_engine is None:
        p_engine = 'all'
    rows = config.get_engine_info(p_engine, p_username)

    if rows is None:
        return -1

    for row in rows:
        data.data_insert(row[0], row[1], row[2], row[4], row[5], row[6],
                         row[8], row[9])
    print("")
    print(data.data_output(False))
    print("")
    return None
Example #4
def do_print_meta(**kwargs):
    connref = kwargs.get('connref')
    connlist = kwargs.get('connlist')
    engine_obj = kwargs.get('engine_obj')
    format = kwargs.get('format')

    connobj = connlist.get_by_ref(connref)

    if connobj.is_database:
        metaname = 'Table name'
    else:
        metaname = 'File name'

    data = DataFormatter()

    data_header = [("Engine name", 30), ("Connector name", 30), (metaname, 40)]

    data.create_header(data_header)
    data.format_type = format
    metalist = connobj.fetch_meta()

    if metalist:
        for meta_item in metalist:
            data.data_insert(engine_obj.get_name(), connobj.connector_name,
                             meta_item)
    else:
        print_error("List of tables/files is empty")
        return 1

    print("")
    print(data.data_output(False))
    print("")
    return 0
def profile_export(p_engine, profilename, exportfile):
    """
    Export list of Profile sets into csv
    param1: p_engine: engine name from configuration
    param2: profilename: profile name to list
    param3: exportfile: file location
    return 0 if profile name found
    """
    data = DataFormatter()
    data_header = [("Profile name", 30), ("Expression name", 30)]

    data.create_header(data_header)
    data.format_type = "csv"
    ret = profile_worker(p_engine=p_engine,
                         profilename=profilename,
                         exportfile=exportfile,
                         function_to_call='do_profileexport',
                         data=data,
                         mapping=True)

    if ret == 0:
        output = data.data_output(False)
        try:
            exportfile.write(output)
            exportfile.close()
            print_message("Profile(s) saved to file %s" % exportfile.name)
            return 0
        except Exception as e:
            print_error("Problem with file %s Error: %s" %
                        (exportfile.name, str(e)))
            return 1

    else:
        return 1
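Note that profile_export expects an already opened, writable file object rather than a path: it calls exportfile.write() and exportfile.close() and reads exportfile.name. A hypothetical call could look like the sketch below; the engine name and file name are made up, and profilename=None is assumed here to mean all profiles.

# Hypothetical usage of profile_export; "myengine" and the file name are made up.
# The function writes the CSV output and closes the handle itself.
handle = open("profiles.csv", "w")
rc = profile_export(p_engine="myengine", profilename=None, exportfile=handle)
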
Example #6
def sync_list(p_engine, objecttype, objectname, envname, format):
    """
    Print list of syncable objects
    param1: p_engine: engine name from configuration
    param2: objecttype: objecttype to list, all if None
    param3: objectname: objectname to list_table_details
    param4: format: output format
    return 0 if objecttype found
    """
    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Object type", 30),
                    ("Env name",    32),
                    ("Object name", 32),
                    ("Revision",    50)
                  ]
    data.create_header(data_header)
    data.format_type = format

    ret = sync_worker(p_engine, objecttype, objectname, envname, "do_list",
                      data=data)

    print("XXX")
    print (data.data_output(False))
    print("")

    return ret
Example #7
def expression_list(p_engine, p_username, expname, p_format):
    """
    Print list of expressions
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: expname: expression name to list
    param4: p_format: output format
    return 0 if expression name found
    """

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Expression name", 30),
                    ("Domain name", 30),
                    ("Created by", 30),
                    ("Column/Data level", 30),
                    ("Regular expression", 30)
                  ]

    data.create_header(data_header)
    data.format_type = p_format

    ret = expression_worker(p_engine=p_engine,
                            expname=expname,
                            function_to_call='do_expresionlist',
                            p_format=p_format,
                            data=data)

    print("")
    print(data.data_output(False))
    print("")
    return ret
Example #8
def driver_list(p_engine, p_username, format, driver_name):
    """
    Print list of file formats
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: drver_name: driver name
    return 0 if environment found
    """

    ret = 0

    logger = logging.getLogger()
    logger.debug("driver_name {}".format(driver_name))

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [("Engine name", 30), ("Driver name", 30),
                   ("Driver class name", 50), ("Built-in", 10)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:

        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        if not feature_support(engine_obj, "5.3.9"):
            ret = ret + 1
            continue

        driver_list = DxJDBCList()
        # load all objects

        if driver_name:
            drivers = driver_list.get_all_driver_id_by_name(driver_name)
            if drivers is None:
                ret = ret + 1
                continue
        else:
            drivers = driver_list.get_allref()

        for driver_ref in drivers:
            driver_obj = driver_list.get_by_ref(driver_ref)

            if driver_obj.built_in:
                builtin = "True"
            else:
                builtin = "False"

            data.data_insert(engine_tuple[0], driver_obj.driver_name,
                             driver_obj.driver_class_name, builtin)

    print("")
    print(data.data_output(False))
    print("")
    return ret
Example #9
def domain_list(p_engine, format, domainname):
    """
    Print list of algorithms
    param1: p_engine: engine name from configuration
    param2: domainname: domain name to list, all if None
    return 0 if domain name found
    """

    ret = 0

    logger = logging.getLogger()

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Domain name", 32)
                  ]
    data.format_type = format
    data.create_header(data_header)

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue


        domainlist = DxDomainList()
        domainref_list = []

        if domainname:
            domobj = domainlist.get_by_ref(domainname)
            if domobj:
                domainref_list.append(domobj.domain_name)
            else:
                print_error("Domain {} not found".format(domainname))
                return 1
        else:
            domainref_list = domainlist.get_allref()

        for domainref in domainref_list:
            domobj = domainlist.get_by_ref(domainref)
            data.data_insert(
                              engine_tuple[0],
                              domobj.domain_name,
                            )


        print("")
        print(data.data_output(False))
        print("")

    return ret
Example #10
def environment_list(p_engine, format, envname):
    """
    Print list of environments
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: envname: environment name to list, all if None
    return 0 if environment found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Environment name", 30),
                    ("Application name", 30)
                  ]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        envlist = DxEnvironmentList()
        # load all objects
        envlist.LoadEnvironments()

        if envname is None:
            environments = envlist.get_allref()
        else:
            environment = envlist.get_environmentId_by_name(envname)

            if environment is None:
                ret = ret + 1
                continue
            environments = [environment]

        for envref in environments:
            envobj = envlist.get_by_ref(envref)
            data.data_insert(
                              engine_tuple[0],
                              envobj.environment_name,
                              envobj.application
                            )
    print("")
    print(data.data_output(False))
    print("")
    return ret
Example #11
def column_list(p_engine, format, sortby, rulesetname, envname, metaname,
                columnname, algname, is_masked):
    """
    Print column list
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: sortby: sort by output if needed
    param4: rulesetname: ruleset name
    param5: envname: environment name
    param6: metaname: meta name (table or file)
    param7: columnname: column name (column or field)
    param8: algname: algorithm name to filter
    param9: is_masked: is masked filter
    return 0 if no issues
    """

    data = DataFormatter()
    data_header = [
        ("Engine name", 30),
        ("Environment name", 30),
        ("Ruleset name", 30),
        ("Metadata name", 32),
        ("Column name", 32),
        ("Type", 8),
        ("Data type", 30),
        ("Date format", 15),
        ("Domain name", 32),
        ("Alg name", 32),
    ]
    data.create_header(data_header)
    data.format_type = format
    ret = column_worker(p_engine,
                        sortby,
                        rulesetname,
                        envname,
                        metaname,
                        columnname,
                        algname,
                        is_masked,
                        None,
                        None,
                        None,
                        'do_print',
                        data=data)

    print("")
    print(data.data_output(False, sortby))
    print("")

    return ret
Example #12
def application_list(p_engine, format, appname):
    """
    Print list of applications
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: appname: application name to list, all if None
    return 0 if application found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Application name", 30),
                  ]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        applist = DxApplicationList()
        # load all objects
        applist.LoadApplications()

        if appname is None:
            applications = applist.get_allref()
        else:
            applications = applist.get_applicationId_by_name(appname)
            if len(applications) == 0:
                ret = ret + 1

        for appref in applications:
            appobj = applist.get_by_ref(appref)
            data.data_insert(
                              engine_tuple[0],
                              appobj.application_name
                            )
        print("")
        print (data.data_output(False))
        print("")
        return ret
Example #13
def role_list(p_engine, p_username, format, rolename):
    """
    Print list of roles
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rolename: role name to list, all if None
    return 0 if role found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [("Engine name", 30), ("Role name", 30)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        rolelist = DxRoleList()
        # load all objects

        if rolename is None:
            roles = rolelist.get_allref()
        else:
            role = rolelist.get_roleId_by_name(rolename)

            if role is None:
                ret = ret + 1
                continue
            roles = [role]

        for roleref in roles:
            roleobj = rolelist.get_by_ref(roleref)
            data.data_insert(engine_tuple[0], roleobj.role_name)
    print("")
    print(data.data_output(False))
    print("")
    return ret
Example #14
def column_export(p_engine, sortby, rulesetname, envname, metaname, columnname,
                  algname):
    """
    Print column list
    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: algname: algorithm name to filter
    param8: is_masked: is masked fileter
    param9: file: file to write output
    return 0 if no issues
    """

    data = DataFormatter()
    data_header = [("Metadata name", 32), ("Column name", 32),
                   ("Alg name", 32), ("Domain name", 32), ("is_masked", 32),
                   ("idmethod", 32), ("dateformat", 32)]
    data.create_header(data_header)
    data.format_type = "json"

    ret = column_worker(p_engine,
                        sortby,
                        rulesetname,
                        envname,
                        metaname,
                        columnname,
                        algname,
                        None,
                        None,
                        None,
                        None,
                        'do_export',
                        data=data)

    if ret == 0:
        return data
    else:
        return None
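Unlike the other listing functions, column_export does not print anything itself: on success it returns the populated DataFormatter (with format_type set to "json"), and on failure it returns None, so rendering is left to the caller. A hypothetical caller is sketched below; the engine and ruleset names are made up.

# Hypothetical caller of column_export; engine and ruleset names are illustrative.
data = column_export("myengine", None, "myruleset", None, None, None, None)
if data is not None:
    print(data.data_output(False))   # same rendering call used by the other examples
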
Example #15
def user_list(p_engine, format, username):
    """
    Print list of users
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: username: user name to list, all if None
    return 0 if user found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [("Engine name", 30), ("User name", 30), ("First name", 30),
                   ("Last name", 30), ("E-mail", 30), ("Auth type", 10),
                   ("Principal", 30), ("Role name", 30), ("Locked", 6),
                   ("Environment list", 30)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        userlist = DxUserList()
        rolelist = DxRoleList()
        envlist = DxEnvironmentList()

        # check if ldap is configured
        appsettingslist = DxAppSettingList()
        ldapobj = appsettingslist.get_appSetting_by_group_and_name(
            'ldap', 'Enable')

        if ldapobj.setting_value == 'true':
            msadobj = appsettingslist.get_appSetting_by_group_and_name(
                'ldap', 'MsadDomain')
        else:
            msadobj = None

        if username is None:
            users = userlist.get_allref()
        else:
            user = userlist.get_userId_by_name(username)

            if user is None:
                ret = ret + 1
                continue
            users = [user]

        for userref in users:
            userobj = userlist.get_by_ref(userref)

            if not userobj.is_admin:
                roleobj = rolelist.get_by_ref(
                    userobj.non_admin_properties.role_id)
                if roleobj is not None:
                    rolename = roleobj.role_name
                else:
                    rolename = 'Not Found'
            else:
                rolename = 'Administrator'

            if userobj.non_admin_properties is not None:
                if userobj.non_admin_properties.environment_ids:
                    envs = ';'.join([
                        envlist.get_by_ref(x).environment_name
                        for x in userobj.non_admin_properties.environment_ids
                    ])
                else:
                    envs = ''
            else:
                envs = ''

            if userobj.is_locked:
                locked = 'Locked'
            else:
                locked = 'Open'

            if msadobj is None:
                authtype = 'NATIVE'
                principal = ''
            else:
                authtype = 'LDAP'
                principal = '{}@{}'.format(userobj.user_name,
                                           msadobj.setting_value)

            data.data_insert(engine_tuple[0], userobj.user_name,
                             userobj.first_name, userobj.last_name,
                             userobj.email, authtype, principal, rolename,
                             locked, envs)
    print("")
    print(data.data_output(False))
    print("")
    return ret
Example #16
def jobs_report_worker(p_engine, p_username, jobname, envname, p_format, last, startdate, enddate, details, jobtype='masking'):
    """
    Print report of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: jobname: job name to list
    param4: envname: environment name to list jobs from
    param5: p_format: output format
    param6: last: display last job execution only
    param7: startdate: filter by start date
    param8: enddate: filter by end date
    param9: details: print execution details
    param10: jobtype: job type - masking or profile
    return 0 if job found
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()

    if jobtype == 'masking':
        if details:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("ExecId", 6),
                ("Meta name", 12),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
        else:
            data_header = [
                ("Engine name", 30),
                ("Environment name", 30),
                ("Job name", 30),
                ("Job Id", 6),
                ("Min Memory", 10),
                ("Max Memory", 10),
                ("Streams", 7),
                ("On The Fly", 10),
                ("Ruleset Type", 12),
                ("ExecId", 6),
                ("Total Rows", 10),
                ("Masked Rows", 10),
                ("Started", 20),
                ("Completed", 20),
                ("Status", 20),
                ("Runtime", 20)
            ]
    else:
        data_header = [
            ("Engine name", 30),
            ("Environment name", 30),
            ("Job name", 30),
            ("Ruleset Type", 12),
            ("ExecId", 6),
            ("Started", 20),
            ("Completed", 20),
            ("Status", 20),
            ("Runtime", 20)
        ]

    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        if jobtype == 'masking':
            joblist = DxJobsList()
        else:
            joblist = DxProfileJobsList()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        #rulesetlist.LoadRulesets(envname)
        #connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)

            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API does not show all types of connectors
            if rulesetobj is not None:
                ruleset_type = rulesetobj.type
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"   
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"
                ruleset_type = "N/A"


            lastonly = bool(last)

            if lastonly:
                execlist = [ jobobj.lastExec ]
            else:
                if startdate or enddate:
                    execlist = jobobj.filter_executions(startdate, enddate)
                else:
                    execlist = jobobj.execList

            if execlist:

                for jobexec in execlist:

                    if not details:

                        if jobtype == 'masking':

                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                rowsmasked = jobexec.rows_masked
                                rowstotal = jobexec.rows_total
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'
                                rowsmasked = 'N/A'
                                rowstotal = 'N/A'

                            data.data_insert(
                                            engine_tuple[0],
                                            envobjname,
                                            jobobj.job_name,
                                            jobobj.masking_job_id,
                                            jobobj.min_memory,
                                            jobobj.max_memory,
                                            jobobj.num_input_streams,
                                            jobobj.on_the_fly_masking,
                                            ruleset_type,
                                            execid,
                                            rowstotal,
                                            rowsmasked,
                                            starttime,
                                            endtime,
                                            status,
                                            runtime
                                            )
                        else:

                            if jobexec is not None:
                                status = jobexec.status
                                execid = jobexec.execution_id
                                if jobexec.start_time is not None:
                                    starttime = jobexec.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                else:
                                    starttime = 'N/A'
                                if (jobexec.end_time is not None) and \
                                (jobexec.start_time is not None):
                                    endtime = jobexec.end_time \
                                        .strftime("%Y-%m-%d %H:%M:%S")
                                    runtimetemp = jobexec.end_time \
                                        - jobexec.start_time
                                    runtime = str(runtimetemp)
                                else:
                                    endtime = 'N/A'
                                    runtime = 'N/A'
                            else:
                                status = 'N/A'
                                endtime = 'N/A'
                                starttime = 'N/A'
                                runtime = 'N/A'
                                execid = 'N/A'


                            data.data_insert(
                                            engine_tuple[0],
                                            envobjname,
                                            jobobj.job_name,
                                            ruleset_type,
                                            execid,
                                            starttime,
                                            endtime,
                                            status,
                                            runtime
                                            )

                    else:
                        # details here      
                        if jobexec is not None:         
                            execid = jobexec.execution_id
                            complist = jobobj.list_execution_component(execid)

                            if complist is not None:
                                for comp in complist:    
                                    status = comp.status
                                    rowsmasked = comp.rows_masked
                                    metaname = comp.component_name
                                    if comp.start_time is not None:
                                        starttime = comp.start_time.strftime("%Y-%m-%d %H:%M:%S")
                                    else:
                                        starttime = 'N/A'
                                    if (comp.end_time is not None) and \
                                    (comp.start_time is not None):
                                        endtime = comp.end_time \
                                            .strftime("%Y-%m-%d %H:%M:%S")
                                        runtimetemp = comp.end_time \
                                            - comp.start_time
                                        runtime = str(runtimetemp)
                                    else:
                                        endtime = 'N/A'
                                        runtime = 'N/A'

                                    data.data_insert(
                                                    engine_tuple[0],
                                                    envobjname,
                                                    jobobj.job_name,
                                                    execid,
                                                    metaname,
                                                    rowsmasked,
                                                    starttime,
                                                    endtime,
                                                    status,
                                                    runtime
                                                    )
                        else:
                            ret = 1

            else:
                # no executions
                ret = 1

    print("")
    print(data.data_output(False))
    print("")
    return ret
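The Started, Completed and Runtime columns above are built with the standard datetime module: start and end times are formatted with strftime, and the runtime is simply str() of the timedelta obtained by subtracting the two datetimes. A standalone illustration with made-up timestamps:

# Standalone illustration of the runtime calculation; the timestamps are made up.
from datetime import datetime

start = datetime(2023, 1, 1, 10, 0, 0)
end = datetime(2023, 1, 1, 11, 23, 45)
print(start.strftime("%Y-%m-%d %H:%M:%S"))  # 2023-01-01 10:00:00
print(str(end - start))                     # 1:23:45
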
Example #17
def jobs_list_worker(p_engine, p_username, jobname, envname, p_format, joblist_class):
    """
    Print list of jobs
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: jobname: job name to list
    param4: envname: environment name to list jobs from
    param5: p_format: output format
    param6: joblist_class: name of job list class - DxJobsList or DxProfileJobsList
    return 0 if job found
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Job name", 30),
                    ("Ruleset name", 30),
                    ("Connector name", 30),
                    ("Environment name", 30),
                    ("Completed", 20),
                    ("Status", 20),
                    ("Runtime", 20)
                  ]
    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList(envname)
        connectorlist = DxConnectorsList(envname)
        joblist = globals()[joblist_class]()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        #rulesetlist.LoadRulesets(envname)
        #connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)

            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API does not show all types of connectors
            if rulesetobj is not None:
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                         envobjname = "N/A"   
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"

            if jobobj.lastExec is not None:
                status = jobobj.lastExec.status
                if (jobobj.lastExec.end_time is not None) and \
                   (jobobj.lastExec.start_time is not None):
                    endtime = jobobj.lastExec.end_time \
                        .strftime("%Y-%m-%d %H:%M:%S")
                    runtimetemp = jobobj.lastExec.end_time \
                        - jobobj.lastExec.start_time
                    runtime = str(runtimetemp)
                else:
                    endtime = 'N/A'
                    runtime = 'N/A'
            else:
                status = 'N/A'
                endtime = 'N/A'
                runtime = 'N/A'

            data.data_insert(
                              engine_tuple[0],
                              jobobj.job_name,
                              rulename,
                              connectorname,
                              envobjname,
                              endtime,
                              status,
                              runtime
                            )
        print("")
        print (data.data_output(False))
        print("")
        return ret
Example #18
def connector_list(p_engine, format, envname, connector_name, details):
    """
    Print list of connectors
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: envname: environment name filter for connectors
    param4: connector_name: connector name to list
    param5: details: print connector details
    return 0 if connector found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()

    if details:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Connector name", 30), ("Connector type", 15),
                       ("Hostname", 30), ("Port", 5), ("Schema name", 30),
                       ("Type depended", 100)]
    else:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Connector name", 30), ("Connector type", 15)]

    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        # connlist = DxConnectorsList()
        # envlist = DxEnvironmentList()
        # envlist.LoadEnvironments()
        # connlist.LoadConnectors(envname)

        DxConnectorsList(envname)

        if connector_name is None:
            connectors = DxConnectorsList.get_allref()
        else:
            connectors = DxConnectorsList.get_all_connectorId_by_name(
                connector_name)
            if connectors is None:
                ret = ret + 1
                continue

        for connref in connectors:
            connobj = DxConnectorsList.get_by_ref(connref)
            if details:
                rest = ''.join([
                    '%s = %s ' % (key, value)
                    for (key, value) in connobj.get_type_properties().items()
                ])
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name, connobj.connector_type,
                    connobj.host, connobj.port, connobj.schema_name, rest)
            else:
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name, connobj.connector_type)

    print("")
    print(data.data_output(False))
    print("")
    return ret
Example #19
def algorithm_list(p_engine, p_username, format, algname):
    """
    Print list of algorithms
    param1: p_engine: engine name from configuration
    param2: format: output format
    param2: algname: algname name to list, all if None
    return 0 if algname found
    """

    ret = 0

    data = DataFormatter()
    data_header = [
        ("Engine name", 30),
        ("Algorithm name", 30),
        ("Domain name", 32),
        ("Syncable", 9),
        ("Algorithm type", 30),
    ]
    data.create_header(data_header)
    data.format_type = format
    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        #domainlist.LoadDomains()

        alglist = DxAlgorithmList()
        #alglist.LoadAlgorithms()

        algref_list = []

        if algname:
            algobj = alglist.get_by_ref(algname)
            if algobj:
                algref_list.append(algobj.algorithm_name)
        else:
            algref_list = alglist.get_allref()

        for algref in algref_list:
            algobj = alglist.get_by_ref(algref)

            if algobj.sync:
                syncable = 'Y'
            else:
                syncable = 'N'

            data.data_insert(engine_tuple[0], algobj.algorithm_name,
                             algobj.domain_name, syncable,
                             algobj.algorithm_type)

            #algobj.export()

        print("")
        print(data.data_output(False))
        print("")

    return ret
def fileformat_list(p_engine, format, fileformat_type, fileformat_name):
    """
    Print list of file formats
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: fileformat_type: file format type
    param4: fileformat_name: file format name
    return 0 if environment found
    """

    ret = 0

    logger = logging.getLogger()
    logger.debug("fileformat type %s fileformat name %s" %
                 (fileformat_type, fileformat_name))

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [("Engine name", 30), ("File format type", 30),
                   ("File format name", 30)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:

        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])
        if engine_obj.get_session():
            continue

        fileformatList = DxFileFormatList()
        # load all objects

        if fileformat_name:
            fileformats_name = fileformatList.get_all_file_format_id_by_name(
                fileformat_name)
            if fileformats_name is None:
                ret = ret + 1
                fileformats_name = []
        else:
            fileformats_name = fileformatList.get_allref()

        if fileformat_type:
            fileformats_type = fileformatList.get_all_file_format_id_by_type(
                fileformat_type)
            if fileformats_type is None:
                ret = ret + 1
                fileformats_type = []
        else:
            fileformats_type = fileformats_name

        fileformats = list(set(fileformats_name) & set(fileformats_type))

        if fileformats:
            for fileformatref in fileformats:
                fileformatobj = fileformatList.get_by_ref(fileformatref)
                data.data_insert(engine_tuple[0],
                                 fileformatobj.file_format_type,
                                 fileformatobj.file_format_name)
        else:
            if fileformat_type and fileformat_name:
                ret = ret + 1

    print("")
    print(data.data_output(False))
    print("")
    return ret
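fileformat_list combines the name filter and the type filter with a set intersection, so a file format is reported only when it matches both filters; when a filter is not given, the name filter defaults to all formats and the type filter defaults to the name-filtered list. A standalone illustration with made-up ids:

# Standalone illustration of the filter combination above; the ids are made up.
fileformats_name = [1, 2, 3]   # ids matching the name filter
fileformats_type = [2, 3, 4]   # ids matching the type filter
print(list(set(fileformats_name) & set(fileformats_type)))   # [2, 3] (set order not guaranteed)
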
Example #21
def ruleset_listmeta(p_engine, format, rulesetname, envname, metaname):
    """
    List tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    return 0 if added, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()
    data_header = [
                    ("Engine name", 30),
                    ("Environent name", 30),
                    ("Ruleset name", 30),
                    ("Metadata type", 15),
                    ("Metadata name", 32)
                  ]
    data.create_header(data_header)
    data.format_type = format
    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_list = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            rulesetref_list = rulelist.get_allref()
            if rulesetref_list is None:
                continue

        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                data.data_insert(
                                  engine_tuple[0],
                                  environment_name,
                                  ruleobj.ruleset_name,
                                  ruleobj.type,
                                  metaobj.meta_name
                                )

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table or file %s not found" % metaname)
        return ret
Example #22
def tab_list_details(p_engine, p_format, rulesetname, envname, metaname, what):
    """
    List details of tables/file from ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    param6: what - Database/File
    return 0 if added, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()

    if what == 'Database':
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("Table name", 32),
                       ("Logical key", 32), ("Where clause", 50),
                       ("Custom SQL", 50)]
    else:
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("File name", 32),
                       ("File type", 32), ("File format name", 32),
                       ("Delimiter", 10), ("End of record", 10)]

    data.create_header(data_header)

    data.format_type = p_format

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                     engine_tuple[2], engine_tuple[3])

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_all = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_all is None:
                ret = ret + 1
                continue
            rulesetref_list = [
                x for x in rulesetref_all
                if rulelist.get_by_ref(x).type == what
            ]
            if not rulesetref_list:
                ret = ret + 1
                continue
        else:
            if what == 'Database':
                rulesetref_list = rulelist.get_all_database_rulesetIds()
                if rulesetref_list is None:
                    continue
            else:
                rulesetref_list = rulelist.get_all_file_rulesetIds()
                if rulesetref_list is None:
                    continue

        filetypelist = DxFileFormatList()
        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                if what == 'Database':
                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.key_column,
                                     repr(metaobj.where_clause),
                                     repr(metaobj.custom_sql))
                else:
                    if metaobj.file_format_id is not None:
                        fileformatobj = filetypelist.get_by_ref(
                            metaobj.file_format_id)
                        fileformatname = fileformatobj.file_format_name
                    else:
                        fileformatname = 'N/A'

                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.file_type, fileformatname,
                                     metaobj.delimiter,
                                     repr(metaobj.end_of_record))

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table %s not found" % metaname)
        return ret
Example #23
def column_save(p_engine, sortby, rulesetname, envname, metaname, columnname,
                algname, is_masked, file, inventory):
    """
    Print column list
    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: algname: algorithm name to filter
    param8: is_masked: is masked fileter
    param9: file: file to write output
    return 0 if no issues
    """

    if p_engine == 'all':
        print_error("you can't run column save command on all engines"
                    "at same time")
        return 1

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    engine_tuple = enginelist[-1]

    engine_obj = DxMaskingEngine(engine_tuple)

    if engine_obj.get_session():
        return 1

    rulelist = DxRulesetList(envname)
    ruleref = rulelist.get_rulesetId_by_name(rulesetname)

    ruleobj = rulelist.get_by_ref(ruleref)

    if ruleobj is None:
        return 1

    if ruleobj.type == "Database":
        data = DataFormatter()
        data_header = [("Table Name", 32), ("Type", 5),
                       ("Parent Column Name", 5), ("Column Name", 32),
                       ("Data Type", 32), ("Domain", 32), ("Algorithm", 32),
                       ("Is Masked", 32), ("ID Method", 32), ("Row Type", 32),
                       ("Date Format", 32)]
        worker = "do_save_database"
    else:
        data = DataFormatter()
        data_header = [("File Name", 32), ("Field Name", 5), ("Domain", 32),
                       ("Algorithm", 32), ("Is Masked", 32), ("Priority", 8),
                       ("Record Type", 15), ("Position", 8), ("Length", 8),
                       ("Date Format", 32)]
        worker = "do_save_file"

    if inventory is True:
        data_header = [("Environment Name", 32),
                       ("Rule Set", 32)] + data_header

    data.create_header(data_header, inventory)
    data.format_type = "csv"

    ret = column_worker(p_engine,
                        sortby,
                        rulesetname,
                        envname,
                        metaname,
                        columnname,
                        algname,
                        is_masked,
                        None,
                        None,
                        None,
                        worker,
                        data=data,
                        inventory=inventory)

    if ret == 0:
        output = data.data_output(False, sortby)
        try:
            file.write(output)
            file.close()
            print_message("Columns saved to file %s" % file.name)
            return 0
        except Exception as e:
            print_error("Problem with file %s Error: %s" % (file.name, str(e)))
            return 1

    else:
        return ret
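Like profile_export, column_save expects an open, writable file object (it calls file.write(), file.close() and reads file.name) and refuses to run against all engines at once. A hypothetical call is sketched below, with made-up engine, ruleset and file names, requesting the inventory layout.

# Hypothetical usage of column_save; engine, ruleset and file names are made up.
# The function writes the CSV output and closes the handle itself.
handle = open("inventory.csv", "w")
rc = column_save("myengine", None, "myruleset", None, None, None,
                 None, None, handle, True)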