Example 1
def test_process_control_detail_insert(mocker, setup):
    """insert() should issue the expected INSERT SQL with bound values."""
    mockdb, mockargv = setup
    detail = ProcessControlDetail(mockargv, const.INITSYNC, 1)
    detail.insert(
        insert_row_count=1,
        update_row_count=2,
        alter_count=3,
        create_count=4,
    )
    expected_values = get_expected_process_control_detail_insert_values(mocker)
    mockdb.execute.assert_called_with(
        EXPECTED_PROCESS_CONTROL_DETAIL_INSERT_SQL,
        expected_values,
        log_sql=mocker.ANY)
Example 2
def clean_target_table(argv, target_db, target_table, query_condition,
                       process_control_id):
    """Delete or truncate target_table ahead of an initsync load.

    No-op unless argv.delete or argv.truncate is set. Progress and the
    affected row count are recorded in a process_control_detail row tied
    to process_control_id.
    """
    mode = const.INITSYNCTRUNC

    if argv.delete or argv.truncate:
        pc_detail = ProcessControlDetail(argv, mode, process_control_id)
        pc_detail.insert(
            object_schema=target_table.schema,
            object_name=target_table.name,
            # Fix: the original called .format(sql=sql) on a literal with no
            # placeholders (and kept a dead `sql = None` local); the plain
            # string is identical output.
            comment="Deleting/Truncating target",
            process_code=mode,
            infolog=logging_loader.get_logfile(const.INFO_HANDLER),
            errorlog=logging_loader.get_logfile(const.ERROR_HANDLER))

        # --delete takes precedence over --truncate when both are set.
        if argv.delete:
            rowcount = target_db.delete(target_table, query_condition)
        elif argv.truncate:
            rowcount = target_db.truncate(target_table)

        # NOTE(review): the message says "Deleted" even on the truncate
        # path — kept as-is to preserve existing log/audit output.
        comment = ("Deleted {count} {table} rows".format(
            count=rowcount, table=target_table.fullname))
        pc_detail.update(
            comment=comment,
            delete_row_count=rowcount,
            source_row_count=rowcount,
            status=const.SUCCESS,
        )
Example 3
def test_process_control_detail_delete(mocker, setup):
    """delete() after insert() drives the full connect/execute/commit cycle."""
    expected_delete_sql = """
        DELETE
        FROM ctl.process_control_detail
        WHERE id = %s"""

    mockdb, mockargv = setup
    detail = ProcessControlDetail(mockargv, const.INITSYNC, 1)
    detail.insert(
        insert_row_count=1,
        update_row_count=2,
        alter_count=3,
        create_count=4,
    )
    detail.delete()

    expected_calls = [
        mocker.call.closed(),
        mocker.call.connect(mocker.ANY),
        mocker.call.execute(
            EXPECTED_PROCESS_CONTROL_DETAIL_INSERT_SQL,
            get_expected_process_control_detail_insert_values(mocker),
            log_sql=mocker.ANY),
        mocker.call.cursor.fetchone(),
        mocker.call.commit(),
        mocker.call.closed(),
        mocker.call.connect(mocker.ANY),
        mocker.call.execute(expected_delete_sql, [1], log_sql=mocker.ANY),
        mocker.call.commit(),
    ]
    mockdb.assert_has_calls(expected_calls)
Example 4
def test_process_control_detail_update(mocker, setup):
    """update() should emit an UPDATE with the given fields plus timings."""
    expected_update_sql = """
        UPDATE ctl.process_control_detail SET
            comment = %s,
            object_name = %s,
            process_starttime = %s,
            process_endtime = %s,
            duration = %s
        WHERE id = %s"""

    mockdb, mockargv = setup
    detail = ProcessControlDetail(mockargv, const.INITSYNC, 1)
    detail.insert(
        insert_row_count=1,
        update_row_count=2,
        alter_count=3,
        create_count=4,
    )
    detail.update(comment='foo', object_name='bar')

    expected_calls = [
        mocker.call.closed(),
        mocker.call.connect(mocker.ANY),
        mocker.call.execute(
            EXPECTED_PROCESS_CONTROL_DETAIL_INSERT_SQL,
            get_expected_process_control_detail_insert_values(mocker),
            log_sql=mocker.ANY),
        mocker.call.cursor.fetchone(),
        mocker.call.commit(),
        mocker.call.closed(),
        mocker.call.connect(mocker.ANY),
        mocker.call.execute(
            expected_update_sql,
            ['foo', 'bar', mocker.ANY, mocker.ANY, mocker.ANY, 1],
            log_sql=mocker.ANY),
        mocker.call.commit(),
    ]
    mockdb.assert_has_calls(expected_calls)
Example 5
def apply(argv, file_path, table, column_list, target_db, process_control_id,
          apply_msg_queue):
    logging_loader.setup_logging(argv.workdirectory,
                                 "_".join([table.name, "apply"]))
    logger = logging.getLogger(__name__)

    mode = const.INITSYNCAPPLY
    message = "Starting {mode} for {table}".format(mode=mode,
                                                   table=table.fullname)

    logger.info(message)
    try:
        logger.debug("[{t}] Apply: Inserting process_control_detail record "
                     "for PC ID={id}".format(t=str(table),
                                             id=process_control_id))
        pc_detail = ProcessControlDetail(argv, mode, process_control_id)
        pc_detail.insert(
            object_schema=table.schema,
            object_name=table.name,
            comment=message,
            process_code=mode,
            infolog=logging_loader.get_logfile(const.INFO_HANDLER),
            errorlog=logging_loader.get_logfile(const.ERROR_HANDLER))
        logger.debug("Successfully inserted process_control_detail record")

        if argv.inputfile:
            fifo = build_file_reader(argv.inputfile)
        else:
            fifo = build_file_reader(file_path)

        if argv.donotload:
            message = (
                "Do Not Load option set, apply aborted for {table}".format(
                    table=table.fullname))
            logger.info(message)
            pc_detail.update(comment=message, status=const.SUCCESS)
        else:
            try:
                logger.debug("Getting source column list for applying")

                def append_metacolname(argv, colname_key, column_list):
                    colname = argv.metacols.get(colname_key)
                    if colname:
                        column_list.append(colname)

                map(lambda x: append_metacolname(argv, x, column_list), [
                    const.METADATA_INSERT_TS_COL, const.METADATA_UPDATE_TS_COL
                ])

                record_count = target_db.bulk_write(
                    input_file=fifo.handle,
                    table_name=table.fullname,
                    sep=const.FIELD_DELIMITER,
                    null_string=argv.nullstring,
                    column_list=column_list,
                    quote_char=chr(const.ASCII_GROUPSEPARATOR),
                    escape_char=chr(const.ASCII_RECORDSEPARATOR),
                    size=argv.buffersize)

                target_db.commit()

                message = ("Applied {count} {table} records".format(
                    count=record_count, table=table.fullname))

                logger.info(message)
                pc_detail.update(
                    comment=message,
                    status=const.SUCCESS,
                    source_row_count=record_count,
                    insert_row_count=record_count,
                )
                result = (datetime.datetime.now(), const.SUCCESS, message)
                apply_msg_queue.put(result)
            except Exception, err:
                err_message = "Failed apply: {err}".format(err=str(err))
                report_error(argv, err_message, pc_detail, logger)
                result = (datetime.datetime.now(), const.ERROR, err_message)
                apply_msg_queue.put(result)

    # A catchall to ensure a return message is sent
    except Exception, err:
        err_message = "Failed apply: {err}".format(err=str(err))
        logger.exception(err_message)
        result = (datetime.datetime.now(), const.ERROR, err_message)
        apply_msg_queue.put(result)
Example 6
def extract(argv, fifo_file_path, table, column_list, source_db,
            process_control_id, query_condition, extract_msg_queue):
    # Extract rows from `table` on source_db and stream them into the fifo
    # at fifo_file_path (optionally teeing to a raw file), reporting the
    # (timestamp, lsn, status, message) outcome via extract_msg_queue.
    # NOTE(review): this function appears truncated in this chunk — the
    # outer try: below has no visible except/finally; confirm against the
    # full file before relying on its error handling.

    logging_loader.setup_logging(argv.workdirectory,
                                 "_".join([table.name, "extract"]))

    logger = logging.getLogger(__name__)

    mode = const.INITSYNCEXTRACT
    message = ("Starting {mode} for {table}".format(mode=mode,
                                                    table=table.fullname))
    logger.info(message)

    fifo = None
    raw_file = None
    record_count = 0
    result = {}
    # lsn tracks the last log sequence number seen; stays None if no rows.
    lsn = None

    try:
        logger.debug("[{t}] Extract: Inserting process_control_detail record "
                     "for PC ID={id}".format(t=str(table),
                                             id=process_control_id))

        pc_detail = ProcessControlDetail(argv, mode, process_control_id)
        pc_detail.insert(
            comment=message,
            object_schema=table.schema,
            object_name=table.name,
            process_code=mode,
            infolog=logging_loader.get_logfile(const.INFO_HANDLER),
            errorlog=logging_loader.get_logfile(const.ERROR_HANDLER))
        logger.debug("Successfully inserted process_control_detail record")

        try:
            logger.debug("source column_list = {l}".format(l=column_list))

            results = source_db.extract_data(column_list, table,
                                             query_condition,
                                             _log_extract_data_sql)

            fifo = build_file_writer(fifo_file_path)

            # Optionally tee the raw extracted records to a file on disk.
            if argv.rawfile is not None:
                filename = get_raw_filename(argv, table)
                raw_file = build_file_writer(filename)

            logger.debug("Writing to fifo")
            record_count = 0

            logger.debug("Fetching many ({}) records at a time...".format(
                argv.arraysize))
            while True:
                records = results.fetchmany(argv.arraysize)
                for record in records:
                    lsn = write(argv, record, fifo, raw_file)
                    record_count += 1
                    _log_progress(table, argv, record_count, logger)
                    _send_heartbeat(record_count, extract_msg_queue)

                # A short batch means the cursor is exhausted.
                if len(records) < argv.arraysize:
                    break

            message = ("Extracted {count} {table} records".format(
                count=record_count, table=table.fullname))

            logger.info(message)
            pc_detail.update(comment=message,
                             status=const.SUCCESS,
                             source_row_count=record_count,
                             delta_startlsn=lsn,
                             delta_endlsn=lsn,
                             query_condition=query_condition)

            # Return the result back to the parent process via the shared queue
            result = (datetime.datetime.now(), lsn, const.SUCCESS, message)
            extract_msg_queue.put(result)
        except Exception, err:
            err_message = "Failed extract: {err}".format(err=str(err))
            report_error(argv, err_message, pc_detail, logger)
            result = (datetime.datetime.now(), lsn, const.ERROR, err_message)
            extract_msg_queue.put(result)

        logger.debug("Finished writing to fifo")
Example 7
def initsync_table(argv, source_conn_details, target_conn_details,
                   source_schema, table_name, target_schema,
                   process_control_id, query_condition, initsync_msg_queue):

    logging_loader.setup_logging(argv.workdirectory, table_name)
    logger = logging.getLogger(__name__)

    lsn = None
    source_db = None
    target_db = None
    try:
        mode = const.INITSYNC

        _update_source_system_profile(argv, source_schema, target_schema,
                                      table_name, const.IN_PROGRESS,
                                      process_control_id, mode)

        pc_detail = ProcessControlDetail(argv, mode, process_control_id)
        pc_detail.insert(
            object_schema=source_schema,
            object_name=table_name,
            comment="Starting InitSync",
            process_code=mode,
            infolog=logging_loader.get_logfile(const.INFO_HANDLER),
            errorlog=logging_loader.get_logfile(const.ERROR_HANDLER))

        source_table = sql_utils.TableName(source_schema, table_name)
        target_table = sql_utils.TableName(target_schema, table_name)

        source_db = get_source_db(argv, source_conn_details, logger)
        if not _table_exists(argv, source_table, source_db, initsync_msg_queue,
                             pc_detail, logger, const.SOURCE):
            return

        target_db = get_target_db(argv, target_conn_details, logger)
        if not _table_exists(argv, target_table, target_db, initsync_msg_queue,
                             pc_detail, logger, const.TARGET):
            return

        clean_target_table(argv, target_db, target_table, query_condition,
                           process_control_id)

        if argv.loaddefinition == const.SRC:
            logger.debug("Loading column lists from SOURCE DB")
            source_column_list = source_db.get_source_column_list(source_table)
            target_column_list = source_db.get_target_column_list(source_table)
        elif argv.loaddefinition == const.DEST:
            logger.debug("Loading column lists from TARGET DB")
            source_column_list = target_db.get_source_column_list(target_table)
            target_column_list = target_db.get_target_column_list(target_table)
        else:
            raise Exception("Unsupported load definition: {d}".format(
                d=argv.loaddefinition))

        pipe_file = create_pipe(argv, source_table, logger)

        applier_proc = None
        extractor_proc = None

        message = ("[{t}] Starting new process for apply...".format(
            t=target_table.name))
        logger.debug(message)
        pc_detail.update(comment=message)

        applier_msg_queue = manager.Queue()
        applier_proc = Process(target=apply,
                               args=(argv, pipe_file, target_table,
                                     target_column_list, target_db,
                                     process_control_id, applier_msg_queue))
        applier_proc.start()
        apply_status = const.IN_PROGRESS

        if not argv.inputfile:
            message = ("[{t}] Starting new process for extract...".format(
                t=source_table.name))
            logger.debug(message)
            pc_detail.update(comment=message)

            extract_msg_queue = manager.Queue()
            extractor_proc = Process(target=extract,
                                     args=(argv, pipe_file, source_table,
                                           source_column_list, source_db,
                                           process_control_id, query_condition,
                                           extract_msg_queue))
            extractor_proc.start()
            extract_status = const.IN_PROGRESS
        else:
            extract_status = const.SUCCESS

        status = _manage_child_processes(argv, pc_detail, extractor_proc,
                                         extract_status, extract_msg_queue,
                                         applier_proc, apply_status,
                                         applier_msg_queue, source_table,
                                         target_table, initsync_msg_queue,
                                         logger)

        execute_post_processing(target_table, target_db)

        _update_source_system_profile(argv, source_schema, target_schema,
                                      table_name, status, process_control_id,
                                      mode)

    except Exception, e:
        message = ("[{table}] Failed initsync: {err}".format(table=table_name,
                                                             err=str(e)))
        logger.exception(message)
        put_table_result(initsync_msg_queue, table_name, lsn, const.ERROR,
                         const.INITSYNC, message)
Example 8
def test_process_control_detail_constructor(setup_test_constructor):
    """A ProcessControlDetail can be constructed from the mock argv fixture."""
    mockargv = setup_test_constructor
    fake_pc_id = 1
    instance = ProcessControlDetail(mockargv, const.INITSYNC, fake_pc_id)
    assert instance is not None