def load_zip_city_data(zip_city_file, force_reload):
    """ Load data into the ZipCity table

        Args:
            zip_city_file: path/url to file to gather ZipCity data from
            force_reload: boolean; if True, reload the table even when no differences are found
    """
    new_data = parse_zip_city_file(zip_city_file)

    diff_found = check_dataframe_diff(new_data, ZipCity, ['zip_city_id'], ['zip_code'])

    if force_reload or diff_found:
        sess = GlobalDB.db().session
        logger.info(
            'Differences found or reload forced, reloading zip_city table.')
        # delete any data in the ZipCity table
        sess.query(ZipCity).delete()

        # insert data into table
        num = insert_dataframe(new_data, ZipCity.__table__.name,
                               sess.connection())
        logger.info('{} records inserted to zip_city'.format(num))
        sess.commit()
    else:
        logger.info('No differences found, skipping zip_city table reload.')
def load_zip_city_data(force_reload):
    """ Load data into the ZipCity table

        Args:
            force_reload: boolean; if True, reload the table even when no differences are found
    """
    if CONFIG_BROKER["use_aws"]:
        s3_client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
        citystate_file = s3_client.generate_presigned_url('get_object', {'Bucket': CONFIG_BROKER['sf_133_bucket'],
                                                                         'Key': "ctystate.txt"}, ExpiresIn=600)
        zip_city_file = urllib.request.urlopen(citystate_file)
    else:
        citystate_file = os.path.join(CONFIG_BROKER["path"], "dataactvalidator", "config", "ctystate.txt")
        zip_city_file = open(citystate_file)

    new_data = parse_zip_city_file(zip_city_file)

    diff_found = check_dataframe_diff(new_data, ZipCity, ['zip_city_id'], ['zip_code'])

    if force_reload or diff_found:
        sess = GlobalDB.db().session
        logger.info('Differences found or reload forced, reloading zip_city table.')
        # delete any data in the ZipCity table
        sess.query(ZipCity).delete()

        # insert data into table
        num = insert_dataframe(new_data, ZipCity.__table__.name, sess.connection())
        logger.info('{} records inserted to zip_city'.format(num))
        sess.commit()
    else:
        logger.info('No differences found, skipping zip_city table reload.')
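
Both variants above follow the same diff-then-reload pattern used throughout this file: parse the source into a DataFrame, compare it against the current table contents, and truncate-and-insert only when something changed or a reload is forced. check_dataframe_diff itself is not shown; a minimal stand-in for the comparison it is assumed to perform (del_cols are database-only surrogate keys dropped before comparing, sort_cols fix the row order) might look like the sketch below, which takes a DataFrame of current table contents rather than a model class:

import pandas as pd

def check_dataframe_diff_sketch(new_df, current_df, del_cols, sort_cols):
    """ Illustrative stand-in: True if new_df differs from current_df.

        del_cols are database-only columns (surrogate keys) dropped before
        comparing; sort_cols give both frames a stable row order.
    """
    current = current_df.drop(columns=del_cols, errors='ignore')
    # Any difference in columns or row count is already a diff
    if set(new_df.columns) != set(current.columns) or len(new_df) != len(current):
        return True
    cols = sorted(new_df.columns)
    left = new_df.sort_values(sort_cols).reset_index(drop=True)[cols]
    right = current.sort_values(sort_cols).reset_index(drop=True)[cols]
    return not left.equals(right)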
def load_county_data(county_file, force_reload):
    """ Load data into the CountyCode table

        Args:
            county_file: path/url to file to gather County data from
            force_reload: boolean; if True, reload the table even when no differences are found
    """
    new_data = parse_county_file(county_file)

    diff_found = check_dataframe_diff(new_data, CountyCode, ['county_code_id'],
                                      ['county_number', 'state_code'])

    if force_reload or diff_found:
        sess = GlobalDB.db().session
        logger.info(
            'Differences found or reload forced, reloading county_code table.')
        # delete any data in the CountyCode table
        sess.query(CountyCode).delete()

        # insert data into table
        num = insert_dataframe(new_data, CountyCode.__table__.name,
                               sess.connection())
        logger.info('{} records inserted to county_code'.format(num))
        sess.commit()
    else:
        logger.info('No differences found, skipping county_code table reload.')
def load_state_data(force_reload):
    """ Load data into the States table

        Args:
            force_reload: boolean; if True, reload the table even when no differences are found
    """
    start_time = datetime.now()
    state_file_url = '{}/state_list.csv'.format(CONFIG_BROKER['usas_public_reference_url'])
    with RetrieveFileFromUri(state_file_url, 'r').get_file_object() as state_file:
        new_data = parse_state_file(state_file)

    diff_found = check_dataframe_diff(new_data, States, ['states_id'], ['state_code'])

    if force_reload or diff_found:
        sess = GlobalDB.db().session
        logger.info('Differences found or reload forced, reloading states table.')
        # delete any data in the States table
        sess.query(States).delete()

        # insert data into table
        num = insert_dataframe(new_data, States.__table__.name, sess.connection())
        logger.info('{} records inserted to states'.format(num))
        sess.commit()
        update_external_data_load_date(start_time, datetime.now(), 'state_code')
    else:
        logger.info('No differences found, skipping states table reload.')
def load_frec(file_name, force_reload=False):
    """ Load FREC (high-level agency names) lookup table.

        Args:
            file_name: path/url to the file to be read
            force_reload: whether to reload regardless of detected differences

        Returns:
            True if new data was loaded, False if the load was skipped
    """
    sess = GlobalDB.db().session
    models = {frec.frec_code: frec for frec in sess.query(FREC)}

    # read FREC values from csv
    data = pd.read_csv(file_name, dtype=str)

    # clean data
    data = clean_data(
        data,
        FREC,
        {'frec': 'frec_code', 'cgac_agency_code': 'cgac_code', 'frec_entity_description': 'agency_name',
         'frec_abbreviation': 'frec_abbreviation', 'frec_cgac_association': 'frec_cgac',
         'icon_filename': 'icon_name'},
        {'frec': {'keep_null': False}, 'cgac_code': {'pad_to_length': 3}, 'frec_code': {'pad_to_length': 4}}
    )
    # args must be a tuple; this passes lower=False through to clean_col
    data['icon_name'] = data['icon_name'].apply(clean_col, args=(False,))
    # keep only rows flagged as FREC-CGAC associations, then de-dupe on frec_code
    data = data[data.frec_cgac == 'TRUE']
    data.drop(['frec_cgac'], axis=1, inplace=True)
    data.drop_duplicates(subset=['frec_code'], inplace=True)
    # create foreign key dicts
    cgac_dict = {str(cgac.cgac_code): cgac.cgac_id for
                 cgac in sess.query(CGAC).filter(CGAC.cgac_code.in_(data['cgac_code'])).all()}
    cgac_dict_flipped = {cgac_id: cgac_code for cgac_code, cgac_id in cgac_dict.items()}

    # compare to existing content in table
    def extract_cgac(row):
        return cgac_dict_flipped[row['cgac_id']] if row['cgac_id'] in cgac_dict_flipped else np.nan

    diff_found = check_dataframe_diff(data, FREC, ['frec_id', 'cgac_id'], ['frec_code'],
                                      lambda_funcs=[('frec_abbreviation', extract_abbreviation),
                                                    ('agency_name', extract_name),
                                                    ('cgac_code', extract_cgac)])
    if force_reload or diff_found:
        # insert to db
        delete_missing_frecs(models, data)
        update_frecs(models, data, cgac_dict)
        sess.add_all(models.values())
        sess.commit()

        logger.info('%s FREC records inserted', len(models))
        return True
    else:
        logger.info('No differences found, skipping frec table reload.')
        return False
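
The lambda_funcs hooks above translate database columns back into the CSV's vocabulary before the diff: extract_cgac, for example, maps each row's cgac_id foreign key back to its cgac_code. A self-contained illustration of that id-to-code translation (toy data; in load_frec the lookup is built from CGAC rows):

import numpy as np
import pandas as pd

# Toy id -> code lookup; load_frec builds this from the CGAC table as cgac_dict_flipped
cgac_dict_flipped = {1: '020', 2: '097'}

def extract_cgac(row):
    # Unmatched ids become NaN, mirroring the np.nan fallback above
    return cgac_dict_flipped.get(row['cgac_id'], np.nan)

db_rows = pd.DataFrame({'frec_code': ['0202', '9700'], 'cgac_id': [1, 3]})
db_rows['cgac_code'] = db_rows.apply(extract_cgac, axis=1)
print(db_rows)  # cgac_id 3 has no match, so its cgac_code is NaN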
def load_defc(force_reload=False):
    """ Loads the DEFC data.

        Args:
            force_reload: boolean; if True, reload the table even when no differences are found
    """
    start_time = datetime.now()
    defc_file = os.path.join(CONFIG_BROKER['path'], 'dataactvalidator',
                             'config', 'def_codes.csv')

    try:
        # Refresh the local copy from the public S3 bucket; fall back to the
        # existing file below if the download fails
        def_codes_url = '{}/def_codes.csv'.format(
            CONFIG_BROKER['usas_public_reference_url'])
        r = requests.get(def_codes_url, allow_redirects=True)
        r.raise_for_status()
        with open(defc_file, 'wb') as defc_out:
            defc_out.write(r.content)
    except Exception:
        logger.warning('Could not refresh def_codes.csv; using the existing local copy')

    logger.info('Loading defc data')
    with create_app().app_context():
        data = pd.read_csv(defc_file, dtype=str)

        # Invalid DEFCs are intentionally left in the file so USAS can keep displaying them; filter them out here
        data = data[data['Is Valid'] == 'true']

        data = clean_data(data, DEFC, {
            'defc': 'code',
            'group_name': 'group'
        }, {})

        diff_found = check_dataframe_diff(data, DEFC, ['defc_id'], ['code'])

        if force_reload or diff_found:
            sess = GlobalDB.db().session
            # delete any data in the DEFC table
            sess.query(DEFC).delete()

            # insert data into table
            num = insert_dataframe(data, DEFC.__table__.name,
                                   sess.connection())
            logger.info('{} records inserted to defc'.format(num))
            sess.commit()
            update_external_data_load_date(start_time, datetime.now(), 'defc')
        else:
            logger.info('No differences found, skipping defc table reload.')

def load_sql(cls, filename):
    """ Load SQL-based validation rules to db. """
    with create_app().app_context():
        sess = GlobalDB.db().session
        filename = os.path.join(cls.sql_rules_path, filename)

        # Initial load
        sql_data = pd.read_csv(filename, dtype=str, usecols=cls.headers)
        sql_data = clean_data(
            sql_data,
            RuleSql,
            {'rule_label': 'rule_label', 'rule_error_message': 'rule_error_message', 'query_name': 'query_name',
             'expected_value': 'expected_value', 'category': 'category', 'file_type': 'file_type',
             'target_file': 'target_file', 'rule_cross_file_flag': 'rule_cross_file_flag',
             'severity_name': 'severity_name'},
            {}
        )

        # Processing certain values
        sql_data['rule_sql'] = sql_data['query_name'].apply(lambda name: cls.read_sql_str(name))
        sql_data['file_id'] = sql_data['file_type'].apply(lambda f_type: FILE_TYPE_DICT.get(f_type, None))
        if sql_data['file_id'].isnull().values.any():
            raise Exception('Invalid file_type value found in sqlLoader. Must be one of the following: {}'
                            .format(', '.join(list(FILE_TYPE_DICT.keys()))))
        sql_data['target_file_id'] = sql_data['target_file'].apply(lambda f_type: FILE_TYPE_DICT.get(f_type, None))
        sql_data['rule_cross_file_flag'] = sql_data['rule_cross_file_flag'].apply(
            lambda flag: flag in ('true', 't', 'y', 'yes'))
        sql_data['rule_severity_id'] = sql_data['severity_name'].apply(
            lambda severity_name: RULE_SEVERITY_DICT.get(severity_name, None))
        if sql_data['rule_severity_id'].isnull().values.any():
            raise Exception('Invalid severity_name value found in sqlLoader. Must be one of the following: {}'
                            .format(', '.join(list(RULE_SEVERITY_DICT.keys()))))
        sql_data.drop(['file_type', 'severity_name', 'target_file'], axis=1, inplace=True)

        # Final check if we need to actually reload
        if check_dataframe_diff(sql_data, RuleSql, del_cols=['rule_sql_id', 'created_at', 'updated_at'],
                                sort_cols=['rule_label', 'file_id', 'target_file_id']):
            # Delete and reload all records currently in table
            logger.info('Detected changes in {}, deleting RuleSQL and reloading'.format(cls.sql_rules_path))
            sess.query(RuleSql).delete()
            insert_dataframe(sql_data, RuleSql.__table__.name, sess.connection())
            sess.commit()
        else:
            logger.info('No changes detected since last load. Skipping.')
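
load_sql reads cls.sql_rules_path, cls.headers, and cls.read_sql_str, so it is evidently a classmethod of a loader class that is not shown here. A minimal skeleton of the class it is assumed to hang off, with illustrative attribute values derived from the columns used above:

import os

class SQLLoader:
    # Illustrative values; the real class defines its own rules path and CSV headers
    sql_rules_path = os.path.join('dataactvalidator', 'config', 'sqlrules')
    headers = ['rule_label', 'rule_error_message', 'query_name', 'expected_value',
               'category', 'file_type', 'target_file', 'rule_cross_file_flag',
               'severity_name']

    @classmethod
    def read_sql_str(cls, name):
        # Assumed layout: one .sql file per query_name under sql_rules_path
        with open(os.path.join(cls.sql_rules_path, name + '.sql')) as sql_file:
            return sql_file.read()

    # load_sql above would be attached here as a classmethod, e.g.
    # load_sql = classmethod(load_sql), and invoked as SQLLoader.load_sql('sqlRules.csv')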
def load_cgac(file_name, force_reload=False):
    """ Load CGAC (high-level agency names) lookup table.

        Args:
            file_name: path/url to the file to be read
            force_reload: whether to reload regardless of detected differences

        Returns:
            True if new data was loaded, False if the load was skipped
    """
    sess = GlobalDB.db().session
    models = {cgac.cgac_code: cgac for cgac in sess.query(CGAC)}

    # read CGAC values from csv
    data = pd.read_csv(file_name, dtype=str)
    # clean data
    data = clean_data(
        data,
        CGAC,
        {'cgac_agency_code': 'cgac_code', 'agency_name': 'agency_name', 'agency_abbreviation': 'agency_abbreviation',
         'icon_filename': 'icon_name'},
        {'cgac_code': {'pad_to_length': 3}}
    )
    # args must be a tuple; this passes lower=False through to clean_col
    data['icon_name'] = data['icon_name'].apply(clean_col, args=(False,))
    # de-dupe
    data.drop_duplicates(subset=['cgac_code'], inplace=True)

    # compare to existing content in table
    diff_found = check_dataframe_diff(data, CGAC, ['cgac_id'], ['cgac_code'],
                                      lambda_funcs=[('agency_abbreviation', extract_abbreviation),
                                                    ('agency_name', extract_name)])

    if force_reload or diff_found:
        delete_missing_cgacs(models, data)
        update_cgacs(models, data)
        sess.add_all(models.values())
        sess.commit()

        logger.info('%s CGAC records inserted', len(models))
        return True
    else:
        logger.info('No differences found, skipping cgac table reload.')
        return False
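
load_frec resolves cgac_code values against rows already present in the CGAC table, and load_sub_tier_agencies (further below) resolves against both CGAC and FREC, so the agency loaders only make sense run in dependency order. A hedged sketch of a driver; the shared file name is illustrative, not the broker's actual wiring:

# Hypothetical driver; the real broker invokes these from its own load script
agency_file = 'agency_codes_list.csv'

if load_cgac(agency_file):
    # FREC rows carry cgac_id foreign keys, so refresh them after CGAC changes
    load_frec(agency_file, force_reload=True)
load_sub_tier_agencies(agency_file)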
def load_cfda_program(base_path, load_local=False, local_file_name="cfda_program.csv"):
    """ Load cfda program.

        Args:
            base_path: directory that contains the cfda values files.
            load_local: boolean indicating whether to load from a local file or not
            local_file_name: the name of the file if loading locally
    """
    local_now = datetime.now()
    if not load_local:
        logger.info("Fetching CFDA file from {}".format(S3_CFDA_FILE))
        tmp_name = str(time.time()).replace(".", "") + "_cfda_program.csv"
        filename = os.path.join(base_path, tmp_name)
        r = requests.get(S3_CFDA_FILE, allow_redirects=True)
        with open(filename, 'wb') as cfda_out:
            cfda_out.write(r.content)
    else:
        filename = os.path.join(base_path, local_file_name)
    logger.info('Loading CFDA program file: ' + filename)
    model = CFDAProgram

    metrics_json = {
        'script_name': 'load_cfda_data.py',
        'start_time': str(local_now),
        'new_records': 0
    }

    def fix_program_number(row, decimals=3):
        multiplier = 10 ** decimals
        value = math.floor(row['program_number'] * multiplier + 0.5) / multiplier
        return str(value).ljust(6, '0')

    with create_app().app_context():
        configure_logging()
        sess = GlobalDB.db().session

        import_data = pd.read_csv(filename, dtype=str, encoding='cp1252', na_filter=False)
        import_data = clean_data(
            import_data,
            model,
            DATA_CLEANING_MAP,
            {}
        )
        import_data["published_date"] = format_date(import_data["published_date"])
        import_data["archived_date"] = format_date(import_data["archived_date"])
        table_name = model.__table__.name
        # Check if there is new data to load
        new_data = check_dataframe_diff(import_data, model, ['cfda_program_id'], ['program_number'],
                                        lambda_funcs=[('program_number', fix_program_number)])
        if new_data:
            # insert to db
            sess.query(model).delete()
            num = insert_dataframe(import_data, table_name, sess.connection())
            sess.commit()

            # If we've updated the data at all, update the external data load date
            update_external_data_load_date(local_now, datetime.now(), 'cfda')
    if not load_local:
        os.remove(filename)
    if new_data:
        logger.info('{} records inserted to {}'.format(num, table_name))
        metrics_json['new_records'] = num
    else:
        logger.info("Skipped cfda load, no new data.")
        sys.exit(3)

    metrics_json['duration'] = str(datetime.now() - local_now)

    with open('load_cfda_data_metrics.json', 'w+') as metrics_file:
        json.dump(metrics_json, metrics_file)
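
Like load_country_codes below, this loader records run metadata in a sidecar JSON file. A minimal sketch of consuming that file (the name is taken from the code above, and the keys are the ones written there):

import json

with open('load_cfda_data_metrics.json') as metrics_file:
    metrics = json.load(metrics_file)

# Keys written above: script_name, start_time, new_records, duration
print(metrics['new_records'], metrics['duration'])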
def load_country_codes(base_path, force_reload=False):
    """ Load Country Codes into the database.

        Args:
            base_path: directory that contains the domain values files.
            force_reload: boolean; if True, reload the table even when no differences are found
    """
    now = datetime.datetime.now()
    metrics_json = {
        'script_name': 'load_country_codes.py',
        'start_time': str(now),
        'records_deleted': 0,
        'records_provided': 0,
        'duplicates_dropped': 0,
        'records_inserted': 0
    }

    with create_app().app_context():
        sess = GlobalDB.db().session

        country_code_file = '{}/country_codes.csv'.format(
            CONFIG_BROKER['usas_public_reference_url'])

        logger.info(
            'Loading country codes file from {}'.format(country_code_file))

        # Get data from public S3 bucket
        r = requests.get(country_code_file, allow_redirects=True)
        filename = os.path.join(base_path, 'country_codes.csv')
        with open(filename, 'wb') as country_codes_out:
            country_codes_out.write(r.content)

        # Parse data
        data = pd.read_csv(filename, dtype=str)
        metrics_json['records_provided'] = len(data.index)
        data = clean_data(
            data, CountryCode, {
                'country_code': 'country_code',
                'country_name': 'country_name',
                '2_char_country_code': 'country_code_2_char',
                'territory_or_freely_associated_state': 'territory_free_state'
            }, {})
        # de-dupe
        data.drop_duplicates(subset=['country_code'], inplace=True)
        metrics_json['duplicates_dropped'] = metrics_json[
            'records_provided'] - len(data.index)

        # compare to existing content in table
        diff_found = check_dataframe_diff(data,
                                          CountryCode, ['country_code_id'],
                                          ['country_code'],
                                          lambda_funcs=[
                                              ('territory_free_state',
                                               convert_territory_bool_to_str)
                                          ])

        # insert to db if reload required
        if force_reload or diff_found:
            logger.info(
                'Differences found or reload forced, reloading country_code table.'
            )
            # if there's a difference, clear out the old data before adding the new stuff
            metrics_json['records_deleted'] = sess.query(CountryCode).delete()

            num = insert_dataframe(data, CountryCode.__table__.name,
                                   sess.connection())
            metrics_json['records_inserted'] = num
            sess.commit()

            # Updating data load dates if the load successfully added new country codes
            update_external_data_load_date(now, datetime.datetime.now(),
                                           'country_code')

            logger.info(
                '{} records inserted to country_code table'.format(num))
        else:
            logger.info(
                'No differences found, skipping country_code table reload.')

        # Delete file once we're done
        os.remove(filename)

    metrics_json['duration'] = str(datetime.datetime.now() - now)

    with open('load_country_codes_metrics.json', 'w+') as metrics_file:
        json.dump(metrics_json, metrics_file)
    logger.info('Script complete')
def load_sub_tier_agencies(file_name, force_reload=False):
    """ Load Sub Tier Agency (sub_tier-level agency names) lookup table.

        Args:
            file_name: path/url to the file to be read
            force_reload: whether to reload regardless of detected differences

        Returns:
            True if new data was loaded, False if the load was skipped
    """
    sess = GlobalDB.db().session
    models = {sub_tier_agency.sub_tier_agency_code: sub_tier_agency for
              sub_tier_agency in sess.query(SubTierAgency)}

    # read Sub Tier Agency values from csv
    data = pd.read_csv(file_name, dtype=str)

    condition = data['TOPTIER_FLAG'] == 'TRUE'
    data.loc[condition, 'PRIORITY'] = 1
    data.loc[~condition, 'PRIORITY'] = 2
    data.replace({'TRUE': True, 'FALSE': False}, inplace=True)

    # clean data
    data = clean_data(
        data,
        SubTierAgency,
        {'cgac_agency_code': 'cgac_code', 'subtier_code': 'sub_tier_agency_code', 'priority': 'priority',
         'frec': 'frec_code', 'subtier_name': 'sub_tier_agency_name', 'is_frec': 'is_frec'},
        {'cgac_code': {'pad_to_length': 3}, 'frec_code': {'pad_to_length': 4},
         'sub_tier_agency_code': {'pad_to_length': 4}}
    )
    # de-dupe
    data.drop_duplicates(subset=['sub_tier_agency_code'], inplace=True)
    # create foreign key dicts
    cgac_dict = {str(cgac.cgac_code): cgac.cgac_id for
                 cgac in sess.query(CGAC).filter(CGAC.cgac_code.in_(data['cgac_code'])).all()}
    cgac_dict_flipped = {cgac_id: cgac_code for cgac_code, cgac_id in cgac_dict.items()}
    frec_dict = {str(frec.frec_code): frec.frec_id for
                 frec in sess.query(FREC).filter(FREC.frec_code.in_(data['frec_code'])).all()}
    frec_dict_flipped = {frec_id: frec_code for frec_code, frec_id in frec_dict.items()}

    # compare to existing content in table
    def int_to_float(row):
        return float(row['priority'])

    def extract_cgac(row):
        return cgac_dict_flipped[row['cgac_id']] if row['cgac_id'] in cgac_dict_flipped else np.nan

    def extract_frec(row):
        return frec_dict_flipped[row['frec_id']] if row['frec_id'] in frec_dict_flipped else np.nan

    diff_found = check_dataframe_diff(data, SubTierAgency, ['sub_tier_agency_id', 'cgac_id', 'frec_id'],
                                      ['sub_tier_agency_code'],
                                      lambda_funcs=[('priority', int_to_float), ('cgac_code', extract_cgac),
                                                    ('frec_code', extract_frec)])
    if force_reload or diff_found:
        delete_missing_sub_tier_agencies(models, data)
        update_sub_tier_agencies(models, data, cgac_dict, frec_dict)
        sess.add_all(models.values())
        sess.commit()

        logger.info('%s Sub Tier Agency records inserted', len(models))
        return True
    else:
        logger.info('No differences found, skipping subtier table reload.')
        return False
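
A note on int_to_float above: DataFrame.equals treats a dtype mismatch as a difference, so if the database side surfaces priority as an integer while the CSV-derived frame holds floats (the data.loc assignments above produce float values), every comparison would look dirty without the cast. A small demonstration of why dtype alignment matters when diffing frames:

import pandas as pd

left = pd.DataFrame({'priority': [1, 2]})        # int64, e.g. database-sourced
right = pd.DataFrame({'priority': [1.0, 2.0]})   # float64, as data.loc produces above
print(left.equals(right))                        # False: dtype mismatch alone reads as a diff
print(left.astype(float).equals(right))          # True once both sides are float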