def generate_conf(guid_batch, phase_number, load_type, tenant_code, target_schema):
    """
    Return all needed configuration information
    :param guid_batch: the guid for the batch
    :param phase_number: the current number of the phase
    :param load_type: type of load. ie. assessment
    :param tenant_code: the tenants 2 letter code
    :return: A dictionary of the config details
    """
    db_params_tuple = get_db_connection_params(udl2_conf['udl2_db_conn']['url'])

    conf = {
        # add guid_batch from msg
        mk.GUID_BATCH: guid_batch,
        # source schema
        mk.SOURCE_DB_SCHEMA: udl2_conf['udl2_db_conn']['db_schema'],
        # source database setting
        mk.SOURCE_DB_DRIVER: db_params_tuple[0],
        mk.SOURCE_DB_USER: db_params_tuple[1],
        mk.SOURCE_DB_PASSWORD: db_params_tuple[2],
        mk.SOURCE_DB_HOST: db_params_tuple[3],
        mk.SOURCE_DB_PORT: db_params_tuple[4],
        mk.SOURCE_DB_NAME: db_params_tuple[5],
        mk.SOURCE_DB_TABLE: Constants.UDL2_JSON_INTEGRATION_TABLE(load_type),
        mk.TARGET_DB_SCHEMA: target_schema,
        mk.REF_TABLE: Constants.UDL2_REF_MAPPING_TABLE(load_type),
        mk.PHASE: int(phase_number),
        mk.LOAD_TYPE: load_type,
        mk.TENANT_NAME: tenant_code,
        mk.PROD_DB_SCHEMA: udl2_conf['prod_db_conn'][tenant_code]['db_schema']
    }
    return conf
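generate_conf unpacks a six-element tuple from get_db_connection_params, which is not shown in this example. Below is a minimal sketch of what that helper is assumed to do, splitting a SQLAlchemy-style connection URL into the (driver, user, password, host, port, name) order unpacked above; the URL in the comment is a placeholder.

from sqlalchemy.engine.url import make_url


# Hypothetical sketch, not the UDL2 implementation: parse a SQLAlchemy-style
# connection URL into the 6-tuple order that generate_conf unpacks above.
def get_db_connection_params(db_url):
    url = make_url(db_url)
    return (url.drivername, url.username, url.password,
            url.host, url.port, url.database)


# e.g. 'postgresql+psycopg2://udl2:secret@localhost:5432/udl2' ->
# ('postgresql+psycopg2', 'udl2', 'secret', 'localhost', 5432, 'udl2')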
Example #2
def test_all_lambda_constants(self):
    self.assertEqual(len(Constants.LOAD_TYPES()), 2)
    self.assertEqual(Constants.LOAD_TYPES(), [
        Constants.LOAD_TYPE_ASSESSMENT,
        Constants.LOAD_TYPE_STUDENT_REGISTRATION
    ])
    self.assertEqual(len(Constants.ASSESSMENT_TYPES()), 3)
    self.assertEqual(Constants.ASSESSMENT_TYPES(), [
        Constants.ASSESSMENT_TYPE_SUMMATIVE,
        Constants.ASSESSMENT_TYPE_INTERIM_COMPREHENSIVE,
        Constants.ASSESSMENT_TYPE_INTERIM_ASSESSMENT_BLOCKS
    ])
    self.assertEqual(
        Constants.UDL2_STAGING_TABLE(Constants.LOAD_TYPE_ASSESSMENT),
        Constants.STG_ASMT_OUT_TABLE)
    self.assertEqual(
        Constants.UDL2_STAGING_TABLE(
            Constants.LOAD_TYPE_STUDENT_REGISTRATION),
        Constants.STG_SR_TABLE)
    self.assertEqual(
        Constants.UDL2_INTEGRATION_TABLE(Constants.LOAD_TYPE_ASSESSMENT),
        Constants.INT_ASMT_OUT_TABLE)
    self.assertEqual(
        Constants.UDL2_INTEGRATION_TABLE(
            Constants.LOAD_TYPE_STUDENT_REGISTRATION),
        Constants.INT_SR_TABLE)
    self.assertEqual(
        Constants.UDL2_JSON_INTEGRATION_TABLE(
            Constants.LOAD_TYPE_ASSESSMENT), Constants.INT_ASMT_TABLE)
    self.assertEqual(
        Constants.UDL2_JSON_INTEGRATION_TABLE(
            Constants.LOAD_TYPE_STUDENT_REGISTRATION),
        Constants.INT_SR_META_TABLE)
    self.assertEqual(
        Constants.UDL2_REF_MAPPING_TABLE(Constants.LOAD_TYPE_ASSESSMENT),
        Constants.ASMT_REF_TABLE)
    self.assertEqual(
        Constants.UDL2_REF_MAPPING_TABLE(
            Constants.LOAD_TYPE_STUDENT_REGISTRATION),
        Constants.SR_REF_TABLE)
    self.assertEqual(Constants.TENANT_SEQUENCE_NAME('cat'),
                     Constants.SEQUENCE_NAME + '_' + 'cat')
    self.assertEqual(Constants.TENANT_SEQUENCE_NAME(''), None)
    self.assertEqual(Constants.TENANT_SEQUENCE_NAME(None), None)
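The assertions above pin down a simple load-type dispatch inside Constants. A minimal sketch of that pattern is below; ExampleConstants and its table-name strings are placeholders, not the values UDL2 actually uses.

# Illustrative sketch of the load-type -> table-name dispatch exercised by the
# test above. ExampleConstants and its string values are placeholders only.
class ExampleConstants(object):
    LOAD_TYPE_ASSESSMENT = 'assessment'
    LOAD_TYPE_STUDENT_REGISTRATION = 'studentregistration'
    INT_ASMT_TABLE = 'int_asmt_json'          # placeholder name
    INT_SR_META_TABLE = 'int_sr_meta_json'    # placeholder name

    @classmethod
    def UDL2_JSON_INTEGRATION_TABLE(cls, load_type):
        return {
            cls.LOAD_TYPE_ASSESSMENT: cls.INT_ASMT_TABLE,
            cls.LOAD_TYPE_STUDENT_REGISTRATION: cls.INT_SR_META_TABLE,
        }.get(load_type)


assert ExampleConstants.UDL2_JSON_INTEGRATION_TABLE('assessment') == 'int_asmt_json'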
def get_content_validator_conf(guid_batch, load_type):
    udl_db_conn = udl2_conf.get(Constants.UDL2_DB_CONN)
    conf = {
        mk.SOURCE_DB_SCHEMA: udl_db_conn.get(Constants.DB_SCHEMA),
        mk.ASMT_TABLE: Constants.UDL2_JSON_INTEGRATION_TABLE(load_type),
        mk.ASMT_OUTCOME_TABLE: Constants.UDL2_STAGING_TABLE(load_type),
        mk.GUID_BATCH: guid_batch,
        mk.LOAD_TYPE: load_type
    }
    return conf
def verify_json_load(self, load_type, conf, columns, guid):
    load_json(conf)

    sr_int_table = self.udl2_conn.get_table(
        Constants.UDL2_JSON_INTEGRATION_TABLE(load_type))
    query = select(['*'], sr_int_table.c.guid_batch == guid)
    result = self.udl2_conn.execute(query).fetchall()
    for row in result:
        self.assertEqual(len(row), len(columns),
                         'Unexpected number of columns')
        for column in columns:
            self.assertTrue(row[column],
                            'Expected column does not have data')
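The query above uses the legacy SQLAlchemy 1.x calling convention, where select takes a column list and a whereclause positionally. A rough 1.4+ equivalent of the same guid_batch filter is sketched below against a stand-in table, not UDL2's real integration table.

from sqlalchemy import Column, MetaData, String, Table, create_engine, select

# Stand-in table and in-memory engine, for illustration only.
engine = create_engine('sqlite://')
metadata = MetaData()
sr_int_table = Table('int_table', metadata,
                     Column('guid_batch', String),
                     Column('name', String))
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(sr_int_table.insert(), [{'guid_batch': 'g1', 'name': 'row1'}])
    # SQLAlchemy 1.4+ style: select(table).where(...) instead of select([...], whereclause)
    rows = conn.execute(
        select(sr_int_table).where(sr_int_table.c.guid_batch == 'g1')).fetchall()
    assert len(rows) == 1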
def generate_config(self, load_type, file, guid):
    results = sfv_util.get_source_target_column_values_from_ref_column_mapping(
        Constants.UDL2_JSON_LZ_TABLE, load_type)
    conf = {
        mk.GUID_BATCH: guid,
        mk.FILE_TO_LOAD: file,
        mk.MAPPINGS: dict([(row[0], row[1].split('.')) for row in results]),
        mk.TARGET_DB_TABLE: Constants.UDL2_JSON_INTEGRATION_TABLE(load_type),
        mk.TARGET_DB_SCHEMA: udl2_conf['udl2_db_conn']['db_schema'],
        mk.TENANT_NAME: 'cat'
    }
    return conf
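The mk.MAPPINGS entry above turns each ref-mapping row into a dictionary from a column name to a dotted path split into a key list. A tiny illustration with made-up rows:

# Made-up ref-mapping rows, only to show the shape consumed by the
# dict comprehension in generate_config above.
results = [
    ('guid_student', 'identification.guid'),
    ('name_first', 'name.firstname'),
]
mappings = dict([(row[0], row[1].split('.')) for row in results])
# -> {'guid_student': ['identification', 'guid'], 'name_first': ['name', 'firstname']}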
def generate_conf_for_loading(json_file, guid_batch, load_type, tenant_name):
    '''
    Take the msg and pull out the relevant parameters to pass to the
    method that loads the JSON.
    '''
    results = sfv_util.get_source_target_column_values_from_ref_column_mapping(
        Constants.UDL2_JSON_LZ_TABLE, load_type)
    conf = {
        mk.FILE_TO_LOAD: json_file,
        mk.MAPPINGS: dict([(row[0], row[1].split('.')) for row in results]),
        mk.TARGET_DB_SCHEMA: udl2_conf['udl2_db_conn']['db_schema'],
        mk.TARGET_DB_TABLE: Constants.UDL2_JSON_INTEGRATION_TABLE(load_type),
        mk.GUID_BATCH: guid_batch,
        mk.TENANT_NAME: tenant_name
    }
    return conf
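load_json itself is not shown here; the sketch below is only a guess at how one of the split key paths from mk.MAPPINGS could be resolved against a parsed JSON document, not the UDL2 implementation.

import json


# Hypothetical helper: walk a parsed JSON document along a split dotted path,
# returning None when any key along the way is missing.
def resolve_path(document, key_path):
    value = document
    for key in key_path:
        if not isinstance(value, dict):
            return None
        value = value.get(key)
    return value


doc = json.loads('{"name": {"firstname": "Ada", "lastname": "Lovelace"}}')
assert resolve_path(doc, ['name', 'firstname']) == 'Ada'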
Example #7
def task(msg):
    start_time = datetime.datetime.now()
    logger.info(
        "LOAD_SR_INTEGRATION_TO_TARGET: Migrating data from SR integration tables to target tables."
    )
    guid_batch = msg[mk.GUID_BATCH]
    load_type = msg[mk.LOAD_TYPE]

    source_tables = [
        Constants.UDL2_INTEGRATION_TABLE(load_type),
        Constants.UDL2_JSON_INTEGRATION_TABLE(load_type)
    ]
    target_table = Constants.SR_TARGET_TABLE

    # Fall back to the batch guid when no explicit target schema is supplied.
    target_schema = msg.get(mk.TARGET_DB_SCHEMA, msg[mk.GUID_BATCH])
    conf = generate_conf(guid_batch, msg[mk.PHASE], load_type,
                         msg[mk.TENANT_NAME], target_schema)
    affected_rows = move_data_from_int_tables_to_target_table(
        conf, task.name, source_tables, target_table)

    end_time = datetime.datetime.now()

    # benchmark
    benchmark = BatchTableBenchmark(guid_batch,
                                    load_type,
                                    task.name,
                                    start_time,
                                    end_time,
                                    task_id=str(task.request.id),
                                    working_schema="",
                                    size_records=affected_rows[0],
                                    tenant=msg[mk.TENANT_NAME])
    benchmark.record_benchmark()

    notification_data = {mk.TOTAL_ROWS_LOADED: affected_rows[0]}
    merge_to_udl2stat_notification(guid_batch, notification_data)
    outgoing_msg = {}
    outgoing_msg.update(msg)
    outgoing_msg.update(notification_data)
    return outgoing_msg