Example No. 1
def ruleset_listmeta(p_engine, p_username, format, rulesetname, envname,
                     metaname):
    """
    List tables/files from a ruleset
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: format: output format
    param4: rulesetname: ruleset name to display metadata from
    param5: envname: environment name to display metadata from
    param6: metaname: name of table/file to display
    return 0 if found, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()
    data_header = [("Engine name", 30), ("Environent name", 30),
                   ("Ruleset name", 30), ("Metadata type", 15),
                   ("Metadata name", 32)]
    data.create_header(data_header)
    data.format_type = format
    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList(envname)
        #rulelist.LoadRulesets()
        connlist = DxConnectorsList(envname)
        #connlist.LoadConnectors()

        if rulesetname:
            rulesetref_list = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_list is None:
                ret = ret + 1
                continue
        else:
            rulesetref_list = rulelist.get_allref()
            if rulesetref_list is None:
                continue

        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                data.data_insert(engine_tuple[0], environment_name,
                                 ruleobj.ruleset_name, ruleobj.type,
                                 metaobj.meta_name)

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table or file %s not found" % metaname)
        return ret
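
Usage sketch (not part of the original listing): a hypothetical call to the function above, assuming it is importable from the surrounding module; the engine, user, ruleset, and environment names are placeholders.

# List all tables/files of ruleset "RS_SALES" defined in environment "ENV_MASK"
# on the configured engine "engine1", authenticating as "admin", CSV output.
rc = ruleset_listmeta("engine1", "admin", "csv", "RS_SALES", "ENV_MASK", None)
if rc != 0:
    print("listing returned a non-zero code: %s" % rc)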
Example No. 2
def tab_list_details(p_engine, p_format, rulesetname, envname, metaname, what):
    """
    List details of tables/files from a ruleset
    param1: p_engine: engine name from configuration
    param2: p_format: output format
    param3: rulesetname: ruleset name to display metadata from
    param4: envname: environment name to display metadata from
    param5: metaname: name of table/file to display
    param6: what - Database/File
    return 0 if found, non zero for error
    """

    ret = 0
    found = False

    data = DataFormatter()

    if what == 'Database':
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("Table name", 32),
                       ("Logical key", 32), ("Where clause", 50),
                       ("Custom SQL", 50)]
    else:
        data_header = [("Engine name", 30), ("Environent name", 30),
                       ("Ruleset name", 30), ("File name", 32),
                       ("File type", 32), ("File format name", 32),
                       ("Delimiter", 10), ("End of record", 10)]

    data.create_header(data_header)

    data.format_type = p_format

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulelist = DxRulesetList()
        rulelist.LoadRulesets(envname)
        connlist = DxConnectorsList()
        connlist.LoadConnectors(envname)

        if rulesetname:
            rulesetref_all = rulelist.get_all_rulesetId_by_name(rulesetname)
            if rulesetref_all is None:
                ret = ret + 1
                continue
            rulesetref_list = [
                x for x in rulesetref_all
                if rulelist.get_by_ref(x).type == what
            ]
            if not rulesetref_list:
                ret = ret + 1
                continue
        else:
            if what == 'Database':
                rulesetref_list = rulelist.get_all_database_rulesetIds()
                if rulesetref_list is None:
                    continue
            else:
                rulesetref_list = rulelist.get_all_file_rulesetIds()
                if rulesetref_list is None:
                    continue

        filetypelist = DxFileFormatList()
        metalist = DxMetaList()

        for ruleref in rulesetref_list:
            ruleobj = rulelist.get_by_ref(ruleref)
            connobj = connlist.get_by_ref(ruleobj.connectorId)
            if connobj:
                envobj = envlist.get_by_ref(connobj.environment_id)
                environment_name = envobj.environment_name
            else:
                environment_name = 'N/A'

            metalist.LoadMeta(ruleobj.ruleset_id)

            if metaname:
                metalist_ref = metalist.get_all_MetadataId_by_name(metaname, 1)
                if metalist_ref is None:
                    ret = ret + 1
                    continue
                found = True
            else:
                metalist_ref = metalist.get_allref()
                if metalist_ref is None:
                    continue

            for metaid in metalist_ref:
                metaobj = metalist.get_by_ref(metaid)
                if what == 'Database':
                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.key_column,
                                     repr(metaobj.where_clause),
                                     repr(metaobj.custom_sql))
                else:
                    if metaobj.file_format_id is not None:
                        fileformatobj = filetypelist.get_by_ref(
                            metaobj.file_format_id)
                        fileformatname = fileformatobj.file_format_name
                    else:
                        fileformatname = 'N/A'

                    data.data_insert(engine_tuple[0], environment_name,
                                     ruleobj.ruleset_name, metaobj.meta_name,
                                     metaobj.file_type, fileformatname,
                                     metaobj.delimiter,
                                     repr(metaobj.end_of_record))

    print("")
    print(data.data_output(False))
    print("")

    if found:
        return 0
    else:
        if metaname:
            print_error("Table %s not found" % metaname)
        return ret
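
Usage sketch (hypothetical, not from the original source): list details for every database ruleset on placeholder engine "engine1"; the "Database"/"File" switch corresponds to the `what` parameter documented above.

# Detailed listing of all database tables across all rulesets, CSV output.
rc = tab_list_details("engine1", "csv", None, None, None, "Database")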
Example No. 3
def algorithm_list(p_engine, p_username, format, algname):
    """
    Print list of algorithms
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: format: output format
    param4: algname: algorithm name to list, all if None
    return 0 if algname found
    """

    ret = 0

    data = DataFormatter()
    data_header = [
        ("Engine name", 30),
        ("Algorithm name", 30),
        ("Domain name", 32),
        ("Syncable", 9),
        ("Algorithm type", 30),
    ]
    data.create_header(data_header)
    data.format_type = format
    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        #domainlist.LoadDomains()

        alglist = DxAlgorithmList()
        #alglist.LoadAlgorithms()

        algref_list = []

        if algname:
            algobj = alglist.get_by_ref(algname)
            if algobj:
                algref_list.append(algobj.algorithm_name)
        else:
            algref_list = alglist.get_allref()

        for algref in algref_list:
            algobj = alglist.get_by_ref(algref)

            if algobj.sync:
                syncable = 'Y'
            else:
                syncable = 'N'

            data.data_insert(engine_tuple[0], algobj.algorithm_name,
                             algobj.domain_name, syncable,
                             algobj.algorithm_type)

            #algobj.export()

        print("")
        print(data.data_output(False))
        print("")

    return ret
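
Usage sketch (hypothetical values): list every algorithm on placeholder engine "engine1" as user "admin"; passing None for algname lists all algorithms, as described in the docstring.

rc = algorithm_list("engine1", "admin", "csv", None)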
Example No. 4
def jobs_list_worker(p_engine, jobname, envname, p_format, joblist_class):
    """
    Print list of jobs
    param1: p_engine: engine name from configuration
    param2: jobname: job name to list
    param3: envname: environment name to list jobs from
    param4: p_format: output format
    param5: joblist_class - DxJobsList, DxProfileJobslist
    return 0 if job found
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [("Engine name", 30), ("Job name", 30), ("Ruleset name", 30),
                   ("Connector name", 30), ("Environment name", 30),
                   ("Completed", 20), ("Status", 20), ("Runtime", 20)]
    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList()
        connectorlist = DxConnectorsList()
        joblist = globals()[joblist_class]()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        rulesetlist.LoadRulesets(envname)
        connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)
            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
            envobj = envlist.get_by_ref(connectorobj.environment_id)

            if jobobj.lastExec is not None:
                status = jobobj.lastExec.status
                if (jobobj.lastExec.end_time is not None) and \
                   (jobobj.lastExec.start_time is not None):
                    endtime = jobobj.lastExec.end_time \
                        .strftime("%Y-%m-%d %H:%M:%S")
                    runtimetemp = jobobj.lastExec.end_time \
                        - jobobj.lastExec.start_time
                    runtime = str(runtimetemp)
                else:
                    endtime = 'N/A'
                    runtime = 'N/A'
            else:
                status = 'N/A'
                endtime = 'N/A'
                runtime = 'N/A'

            data.data_insert(engine_tuple[0], jobobj.job_name,
                             rulesetobj.ruleset_name,
                             connectorobj.connector_name,
                             envobj.environment_name, endtime, status, runtime)
        print("")
        print(data.data_output(False))
        print("")
        return ret
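
Usage sketch (hypothetical, assuming the function above is in scope): list all masking jobs on placeholder engine "engine1"; the joblist_class argument is a class name resolved via globals(), so "DxJobsList" selects regular masking jobs.

rc = jobs_list_worker("engine1", None, None, "csv", "DxJobsList")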
Example No. 5
def column_save(p_engine, p_username, sortby, rulesetname, envname, metaname,
                columnname, algname, is_masked, file, inventory):
    """
    Save column list to a file
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: sortby: sort by output if needed
    param4: rulesetname: ruleset name
    param5: envname: environment name
    param6: metaname: meta name (table or file)
    param7: columnname: column name (column or field)
    param8: algname: algorithm name to filter
    param9: is_masked: is masked filter
    param10: file: file to write output
    param11: inventory: include inventory-specific columns in the output
    return 0 if no issues
    """

    if p_engine == 'all':
        print_error("you can't run column save command on all engines"
                    "at same time")
        return 1

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    engine_tuple = enginelist[-1]

    engine_obj = DxMaskingEngine(engine_tuple)

    if engine_obj.get_session():
        return 1

    rulelist = DxRulesetList(envname)
    ruleref = rulelist.get_rulesetId_by_name(rulesetname)

    ruleobj = rulelist.get_by_ref(ruleref)

    if ruleobj is None:
        return 1

    if ruleobj.type == "Database":
        data = DataFormatter()
        data_header = [
                        ("Table Name", 32),
                        ("Type", 5),
                        ("Parent Column Name", 5),
                        ("Column Name", 32),
                        ("Data Type", 32),
                        ("Domain", 32),
                        ("Algorithm", 32),
                        ("Is Masked", 32),
                        ("ID Method", 32),
                        ("Row Type", 32),
                        ("Date Format", 32)
                      ]
        worker = "do_save_database"

        if inventory is True:
            data_header = data_header + [("Notes",30)]

        if engine_obj.version_ge("6.0.8"):
            data_header = data_header + [("Multi-Column Logical Field", 10),
                                         ("Group Number", 10)]      

    else:
        data = DataFormatter()
        data_header = [
                        ("File Name", 32),
                        ("Field Name", 5),
                        ("Domain", 32),
                        ("Algorithm", 32),
                        ("Is Masked", 32),
                        ("Priority", 8),
                        ("Record Type", 15),
                        ("Position", 8),
                        ("Length", 8),
                        ("Date Format", 32)
                      ]
        worker = "do_save_file"

        if inventory is True:
            data_header = data_header + [("Notes",30)]

        if engine_obj.version_ge("6.0.8"):
            data_header = data_header + [("Multi-Column Logical Field", 10),
                                         ("Group Number", 10)]  

    if inventory is True:
        data_header = [("Environment Name", 32),
                       ("Rule Set", 32)] + data_header



    data.create_header(data_header, inventory)
    data.format_type = "csv"

    ret = column_worker(
        p_engine, p_username, sortby, rulesetname, envname, metaname, columnname,
        algname, is_masked, None, None,
        None, worker, data=data, inventory=inventory)

    if ret == 0:
        output = data.data_output(False, sortby)
        try:
            file.write(output)
            file.close()
            print_message("Columns saved to file %s" % file.name)
            return 0
        except Exception as e:
            print_error("Problem with file %s Error: %s" %
                        (file.name, str(e)))
            return 1

    else:
        return ret
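
Usage sketch (hypothetical file name and engine/ruleset names): save the column list of placeholder ruleset "RS_SALES" to a local CSV file; the function writes and closes the file object itself.

with open("columns.csv", "w") as outfile:
    rc = column_save("engine1", "admin", None, "RS_SALES", "ENV_MASK",
                     None, None, None, None, outfile, False)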
Example No. 6
def user_list(p_engine, p_username, format, username):
    """
    Print list of users
    param1: p_engine: engine name from configuration
    param2: p_username: user name
    param3: format: output format
    param4: username: user name to list, all if None
    return 0 if user found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine, p_username)

    if enginelist is None:
        return 1

    data = DataFormatter()
    data_header = [("Engine name", 30), ("User name", 30), ("First name", 30),
                   ("Last name", 30), ("E-mail", 30), ("Auth type", 10),
                   ("Principal", 30), ("Role name", 30), ("Locked", 6),
                   ("Environment list", 30)]
    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue
        userlist = DxUserList()
        rolelist = DxRoleList()
        envlist = DxEnvironmentList()

        # check if ldap is configured
        appsettingslist = DxAppSettingList()
        ldapobj = appsettingslist.get_appSetting_by_group_and_name(
            'ldap', 'Enable')

        if ldapobj.setting_value == 'true':
            msadobj = appsettingslist.get_appSetting_by_group_and_name(
                'ldap', 'MsadDomain')
        else:
            msadobj = None

        if username is None:
            users = userlist.get_allref()
        else:
            user = userlist.get_userId_by_name(username)

            if user is None:
                ret = ret + 1
                continue
            users = [user]

        for userref in users:
            userobj = userlist.get_by_ref(userref)

            if not userobj.is_admin:
                roleobj = rolelist.get_by_ref(
                    userobj.non_admin_properties.role_id)
                if roleobj is not None:
                    rolename = roleobj.role_name
                else:
                    rolename = 'Not Found'
            else:
                rolename = 'Administrator'

            if userobj.non_admin_properties is not None:
                if userobj.non_admin_properties.environment_ids:
                    envs = ';'.join([
                        envlist.get_by_ref(x).environment_name
                        for x in userobj.non_admin_properties.environment_ids
                    ])
                else:
                    envs = ''
            else:
                envs = ''

            if userobj.is_locked is not None:
                if userobj.is_locked is True:
                    locked = 'Locked'
                else:
                    locked = 'Open'
            else:
                locked = 'N/A'

            if msadobj is None:
                authtype = 'NATIVE'
                principal = ''
            else:
                authtype = 'LDAP'
                principal = '{}@{}'.format(userobj.user_name,
                                           msadobj.setting_value)

            data.data_insert(engine_tuple[0], userobj.user_name,
                             userobj.first_name, userobj.last_name,
                             userobj.email, authtype, principal, rolename,
                             locked, envs)
    print("")
    print(data.data_output(False))
    print("")
    return ret
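
Usage sketch (placeholder engine and user names): print all users of engine "engine1"; passing None as username lists every user, as described in the docstring.

rc = user_list("engine1", "admin", "csv", None)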
Example No. 7
def column_save(p_engine, sortby, rulesetname, envname, metaname, columnname,
                algname, is_masked, file):
    """
    Save column list to a file
    param1: p_engine: engine name from configuration
    param2: sortby: sort by output if needed
    param3: rulesetname: ruleset name
    param4: envname: environment name
    param5: metaname: meta name (table or file)
    param6: columnname: column name (column or field)
    param7: algname: algorithm name to filter
    param8: is_masked: is masked filter
    param9: file: file to write output
    return 0 if no issues
    """

    if p_engine == 'all':
        print_error("you can run column save command on all engines"
                    "at same time")
        return 1

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    engine_tuple = enginelist[-1]

    engine_obj = DxMaskingEngine(engine_tuple[0], engine_tuple[1],
                                 engine_tuple[2], engine_tuple[3])

    if engine_obj.get_session():
        return 1

    rulelist = DxRulesetList()
    rulelist.LoadRulesets(envname)
    ruleref = rulelist.get_rulesetId_by_name(rulesetname)

    ruleobj = rulelist.get_by_ref(ruleref)

    if ruleobj is None:
        return 1

    if ruleobj.type == "Database":
        data = DataFormatter()
        data_header = [("Table Name", 32), ("Type", 5),
                       ("Parent Column Name", 5), ("Column name", 32),
                       ("Data Type", 32), ("Domain", 32), ("Algorithm", 32),
                       ("Is masked", 32)]
        data.create_header(data_header)
        data.format_type = "csv"
        worker = "do_save_database"
    else:
        data = DataFormatter()
        data_header = [("File Name", 32), ("Field Name", 5), ("Domain", 32),
                       ("Algorithm", 32), ("Is masked", 32)]
        data.create_header(data_header)
        data.format_type = "csv"
        worker = "do_save_file"

    ret = column_worker(p_engine,
                        sortby,
                        rulesetname,
                        envname,
                        metaname,
                        columnname,
                        algname,
                        is_masked,
                        None,
                        None,
                        None,
                        worker,
                        data=data)

    if ret == 0:

        output = data.data_output(False, sortby)
        try:
            file.write(output)
            file.close()
            print_message("Columns saved to file %s" % file.name)
            return 0
        except Exception as e:
            print_error("Problem with file %s Error: %s" % (file.name, str(e)))
            return 1

    else:
        return 1
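
Usage sketch (hypothetical names, for this older single-engine variant): save all columns of placeholder ruleset "RS_SALES" to a CSV file; note that this version takes no p_username or inventory arguments.

with open("columns.csv", "w") as outfile:
    rc = column_save("engine1", None, "RS_SALES", "ENV_MASK",
                     None, None, None, None, outfile)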
Example No. 8
def connector_list(p_engine, format, envname, connector_name, details):
    """
    Print list of connectors
    param1: p_engine: engine name from configuration
    param2: format: output format
    param3: envname: environment name filter for connectors
    param4: connector_name: connector name to list
    param5: details: print connector details
    return 0 if connector found
    """

    ret = 0

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()

    if details:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Connector name", 30), ("Connector type", 10),
                       ("Hostname", 30), ("Port", 5), ("Schema name", 30),
                       ("Type depended", 100)]
    else:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Connector name", 30), ("Connector type", 30)]

    data.create_header(data_header)
    data.format_type = format
    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)

        if engine_obj.get_session():
            continue

        # connlist = DxConnectorsList()
        # envlist = DxEnvironmentList()
        # envlist.LoadEnvironments()
        # connlist.LoadConnectors(envname)

        DxConnectorsList(envname)

        if connector_name is None:
            connectors = DxConnectorsList.get_allref()
        else:
            connectors = DxConnectorsList.get_all_connectorId_by_name(
                connector_name)
            if connectors is None:
                ret = ret + 1
                continue

        for connref in connectors:
            connobj = DxConnectorsList.get_by_ref(connref)
            if details:
                rest = ''.join([
                    '%s = %s ' % (key, value)
                    for (key, value) in connobj.get_type_properties().items()
                ])
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name, connobj.connector_type,
                    connobj.host, connobj.port, connobj.schema_name, rest)
            else:
                data.data_insert(
                    engine_tuple[0],
                    DxEnvironmentList.get_by_ref(
                        connobj.environment_id).environment_name,
                    connobj.connector_name, connobj.connector_type)
        print("")
        print(data.data_output(False))
        print("")
        return ret
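
Usage sketch (placeholder engine name): print every connector together with its connection details; passing None for envname and connector_name lists connectors from all environments.

rc = connector_list("engine1", "csv", None, None, True)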
Example No. 9
def jobs_report_worker(p_engine, jobname, envname, p_format, last, startdate,
                       enddate, details):
    """
    Print report of jobs
    param1: p_engine: engine name from configuration
    param2: jobname: job name to list
    param3: envname: environment name to list jobs from
    param4: p_format: output format
    param5: last: report only the last execution of each job
    param6: startdate: start date filter for executions
    param7: enddate: end date filter for executions
    param8: details: report per-component execution details
    return 0 if job and executions found, non zero for error
    """

    ret = 0

    logger = logging.getLogger()

    enginelist = get_list_of_engines(p_engine)

    if enginelist is None:
        return 1

    data = DataFormatter()

    if details:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Job name", 30), ("ExecId", 6), ("Meta name", 12),
                       ("Masked Rows", 10), ("Started", 20), ("Completed", 20),
                       ("Status", 20), ("Runtime", 20)]
    else:
        data_header = [("Engine name", 30), ("Environment name", 30),
                       ("Job name", 30), ("Job Id", 6), ("Min Memory", 10),
                       ("Max Memory", 10), ("Streams", 7), ("On The Fly", 10),
                       ("Ruleset Type", 12), ("ExecId", 6), ("Total Rows", 10),
                       ("Masked Rows", 10), ("Started", 20), ("Completed", 20),
                       ("Status", 20), ("Runtime", 20)]
    data.create_header(data_header)
    data.format_type = p_format

    for engine_tuple in enginelist:
        engine_obj = DxMaskingEngine(engine_tuple)
        if engine_obj.get_session():
            continue

        # load all objects
        envlist = DxEnvironmentList()
        envlist.LoadEnvironments()
        rulesetlist = DxRulesetList()
        connectorlist = DxConnectorsList()
        joblist = DxJobsList()

        logger.debug("Envname is %s, job name is %s" % (envname, jobname))

        joblist.LoadJobs(envname)
        rulesetlist.LoadRulesets(envname)
        connectorlist.LoadConnectors(envname)

        if jobname is None:
            jobs = joblist.get_allref()
        else:
            jobs = joblist.get_all_jobId_by_name(jobname)
            if jobs is None:
                ret = ret + 1
                continue

        for jobref in jobs:
            jobobj = joblist.get_by_ref(jobref)

            rulesetobj = rulesetlist.get_by_ref(jobobj.ruleset_id)
            # these tests are required for 5.X engines where the API does not show all types of connectors
            if rulesetobj is not None:
                ruleset_type = rulesetobj.type
                rulename = rulesetobj.ruleset_name
                connectorobj = connectorlist.get_by_ref(rulesetobj.connectorId)
                if connectorobj is not None:
                    connectorname = connectorobj.connector_name
                    envobj = envlist.get_by_ref(connectorobj.environment_id)
                    if envobj is not None:
                        envobjname = envobj.environment_name
                    else:
                        envobjname = "N/A"
                else:
                    connectorname = "N/A"
                    envobjname = "N/A"
            else:
                rulename = "N/A"
                connectorname = "N/A"
                envobjname = "N/A"
                ruleset_type = "N/A"

            lastonly = bool(last)

            if lastonly:
                execlist = [jobobj.lastExec]
            else:
                if startdate or enddate:
                    execlist = jobobj.filter_executions(startdate, enddate)
                else:
                    execlist = jobobj.execList

            if execlist:

                for jobexec in execlist:

                    if not details:

                        if jobexec is not None:
                            status = jobexec.status
                            execid = jobexec.execution_id
                            rowsmasked = jobexec.rows_masked
                            rowstotal = jobexec.rows_total
                            if jobexec.start_time is not None:
                                starttime = jobexec.start_time.strftime(
                                    "%Y-%m-%d %H:%M:%S")
                            else:
                                starttime = 'N/A'
                            if (jobexec.end_time is not None) and \
                            (jobexec.start_time is not None):
                                endtime = jobexec.end_time \
                                    .strftime("%Y-%m-%d %H:%M:%S")
                                runtimetemp = jobexec.end_time \
                                    - jobexec.start_time
                                runtime = str(runtimetemp)
                            else:
                                endtime = 'N/A'
                                runtime = 'N/A'
                        else:
                            status = 'N/A'
                            endtime = 'N/A'
                            starttime = 'N/A'
                            runtime = 'N/A'
                            execid = 'N/A'
                            rowsmasked = 'N/A'
                            rowstotal = 'N/A'

                        data.data_insert(
                            engine_tuple[0], envobjname, jobobj.job_name,
                            jobobj.masking_job_id, jobobj.min_memory,
                            jobobj.max_memory, jobobj.num_input_streams,
                            jobobj.on_the_fly_masking, ruleset_type, execid,
                            rowstotal, rowsmasked, starttime, endtime, status,
                            runtime)

                    else:
                        # details here
                        if jobexec is not None:
                            execid = jobexec.execution_id
                            complist = jobobj.list_execution_component(execid)

                            if complist is not None:
                                for comp in complist:
                                    status = comp.status
                                    rowsmasked = comp.rows_masked
                                    metaname = comp.component_name
                                    if comp.start_time is not None:
                                        starttime = comp.start_time.strftime(
                                            "%Y-%m-%d %H:%M:%S")
                                    else:
                                        starttime = 'N/A'
                                    if (comp.end_time is not None) and \
                                    (comp.start_time is not None):
                                        endtime = comp.end_time \
                                            .strftime("%Y-%m-%d %H:%M:%S")
                                        runtimetemp = comp.end_time \
                                            - comp.start_time
                                        runtime = str(runtimetemp)
                                    else:
                                        endtime = 'N/A'
                                        runtime = 'N/A'

                                    data.data_insert(engine_tuple[0],
                                                     envobjname,
                                                     jobobj.job_name, execid,
                                                     metaname, rowsmasked,
                                                     starttime, endtime,
                                                     status, runtime)
                        else:
                            print("setting 1")
                            ret = 1

            else:
                # no executions
                ret = 1

        print("")
        print(data.data_output(False))
        print("")
        return ret
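
Usage sketch (hypothetical values): report the last execution of every job on placeholder engine "engine1" without per-component details.

rc = jobs_report_worker("engine1", None, None, "csv", True, None, None, False)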