# Example #1
# 0
def main():
    """Rebuild the offense summary code table from the file given on the CLI."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('infile', help='GoogleDrive/Team Shared folder - California OpenJustice/Data/Offense Summary Codes')
    cli_args = arg_parser.parse_args()
    with utils.get_connection(URL) as conn:
        # Drop-and-reload the full table contents from the input file.
        summary_codes = tables.OffenseSummaryCode()
        utils.recreate_table_from_file(conn, summary_codes, cli_args.infile)
def main():
    """Rebuild the county agency context table from the file given on the CLI."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('infile', help='full path to file containing county contextual data')
    cli_args = arg_parser.parse_args()
    with utils.get_connection(URL) as conn:
        # Drop-and-reload the full table contents from the input file.
        context_table = tables.CountyAgencyContext()
        utils.recreate_table_from_file(conn, context_table, cli_args.infile)
def main():
    """Rebuild the county clearances summary table from the file given on the CLI."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('infile', help='full path to file containing county crime clearances data')
    cli_args = arg_parser.parse_args()
    with utils.get_connection(URL) as conn:
        # Drop-and-reload the full table contents from the input file.
        clearances = tables.CountyClearancesSummary()
        utils.recreate_table_from_file(conn, clearances, cli_args.infile)
def main():
    """Rebuild the LEO assaults summary table from the file given on the CLI."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('infile',
                            help='full path to file containing LEO assault data')
    cli_args = arg_parser.parse_args()
    with utils.get_connection(URL) as conn:
        # Drop-and-reload the full table contents from the input file.
        assaults = tables.LeoAssaultsSummary()
        utils.recreate_table_from_file(conn, assaults, cli_args.infile)
def main():
    """Load one transformed MACR data file into the existing MACR table."""
    utils.setup_logging()
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('inpath', help='GoogleDrive/Team Shared folder - California OpenJustice/Data/arrests/macr/transformed_table_format')
    cli_args = arg_parser.parse_args()
    with utils.get_connection(URL) as conn:
        # Appends into the existing table; does not drop/recreate it first.
        macr_table = tables.Macr()
        utils.load_data_from_file(conn, macr_table.TABLENAME, cli_args.inpath)
def main():
    """Add the standard query indexes to the MACR table.

    Creates one single-column index per commonly-filtered column, committing
    after each so a failure partway through keeps the indexes already built.
    """
    utils.setup_logging()
    with utils.get_connection(URL) as conn:
        table = tables.Macr()
        start = time.time()
        idxname_columnname = (
            ('idx_macr_arrest_year', 'arrest_year'),
            ('idx_macr_arrest_month', 'arrest_month'),
            ('idx_macr_offense_level', 'offense_level'),
            ('idx_macr_bcs_summary_offense_code', 'bcs_summary_offense_code'),
            ('idx_macr_race_or_ethnicity', 'race_or_ethnicity'),
            ('idx_macr_status_type', 'status_type'),
            ('idx_macr_disposition', 'disposition'),
        )
        # Unpack directly in the for-statement rather than inside the body.
        for idxname, columnname in idxname_columnname:
            logger.info('Adding index %s to %s', idxname, columnname)
            utils.create_index(conn, table.TABLENAME, idxname, columnname)
            conn.commit()
        end = time.time()
        # BUG FIX: was logging.info (root logger); use the module-level
        # logger for consistency with the rest of this function.
        logger.info('Completed adding indexes in %s seconds', end - start)
def main():
    """Drop, recreate, and bulk-load the MACR table from a directory of CSVs.

    Expects one positional CLI argument: the directory containing files
    matching ``macr*csv``. Files are loaded in sorted order with a commit
    after each, so a failure does not roll back earlier files.
    """
    utils.setup_logging()
    parser = argparse.ArgumentParser()
    parser.add_argument('inpath',
                        help='full path to directory containing MACR data')
    args = parser.parse_args()
    # sorted() instead of glob + in-place sort: deterministic load order.
    files = sorted(glob.glob('{}/macr*csv'.format(args.inpath)))
    num_files = len(files)
    with utils.get_connection(URL) as conn:
        table = tables.Macr()
        utils.drop_table(conn, table.TABLENAME)
        utils.create_table(conn, table.TABLENAME, table.COLUMNS)
        start = time.time()
        for num, f in enumerate(files):
            logger.info('Loading file %s of %s', num + 1, num_files)
            utils.load_data_from_file(conn, table.TABLENAME, f)
            conn.commit()
        end = time.time()
        # BUG FIX: was logging.info (root logger); use the module-level
        # logger for consistency with the rest of this function.
        logger.info('Completed loading %s files in %s seconds', num_files,
                    end - start)