Example #1
from pandas import DataFrame, json_normalize

def main():

    logger = get_root_logger()
    get_header(logger, 'LOADING PROJECTIONS')

    client = APIClient()

    # grab dataframe shape from a trial run
    data = client.get_data('weekly-projections', 'json', 'QB')
    test_df = json_normalize(data['Projections'])

    # get DF structure from columns in test_df
    cols = test_df.columns
    df = DataFrame(columns=cols)

    # grab current week
    current_week = test_df.week.values[0]

    # loop through all weeks up to current week
    for wk in [str(x) for x in range(int(current_week))]:
        logger.info('Processing projections for week {0}'.format(int(wk) + 1))
        # loop through all positions
        for pos in ['QB', 'RB', 'WR', 'TE', 'K', 'DEF']:
            tmp_data = client.get_data('weekly-projections', 'json', pos, wk)
            tmp_df = json_normalize(tmp_data['Projections'])
            df = df.append(tmp_df)

    # import this df directly to PG DB
    conn = DBClient()
    conn.load(df, 'projections', schema='raw', if_exists='replace')
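Note: DataFrame.append, used above to grow df inside the loop, was removed in pandas 2.0. On current pandas the same result can be had by collecting the per-week, per-position frames in a list and concatenating once. A minimal sketch under that assumption (build_projection_df is an illustrative helper name, not part of the project):

import pandas as pd
from pandas import json_normalize

def build_projection_df(client, current_week):
    # one frame per (week, position), concatenated in a single pass at the end
    frames = []
    for wk in [str(x) for x in range(int(current_week))]:
        for pos in ['QB', 'RB', 'WR', 'TE', 'K', 'DEF']:
            tmp_data = client.get_data('weekly-projections', 'json', pos, wk)
            frames.append(json_normalize(tmp_data['Projections']))
    return pd.concat(frames, ignore_index=True)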
Example #2
def main(service, key, table):

    logger = get_root_logger()
    get_header(logger, 'GENERIC API IMPORTER')
    logger.info('Importing from {0} API'.format(service))

    importer = GenericImporter()
    data = importer.extract(service, key)

    importer.load(data, table)
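The excerpt above does not include an entry point; a minimal, hypothetical __main__ block that feeds main(service, key, table) from the command line (argument names and help text are assumptions) could look like:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Generic API importer')
    parser.add_argument('service', help='API service to import from')
    parser.add_argument('key', help='key passed to the importer extract step')
    parser.add_argument('table', help='destination table for the load')
    args = parser.parse_args()
    main(args.service, args.key, args.table)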
Example #3
from pandas import json_normalize

def main():

    logger = get_root_logger()
    get_header(logger, "Importing Bye weeks")

    client = APIClient()
    data = client.get_data("byes")

    df = None
    for key in data.keys():
        # build DF the first time through the loop
        if df is None:
            df = json_normalize(data[key])
        # append on every subsequent iteration
        else:
            df = df.append(json_normalize(data[key]))

    # import this df directly to PG DB
    conn = DBClient()
    conn.load(df, "byes", schema="raw", if_exists="replace")
Example #4
from glob import glob
from os import getenv

from pandas import read_csv

from db_client import DBClient

def main():
    
    path = '{0}/data/salaries/*'.format(getenv('BASEDIR'))
    files_list = glob(path)
    
    for i, f in enumerate(files_list):
        df = read_csv(f, sep=';')
        df.rename(columns={'h/a': 'homeoraway',
                           'DK points': 'dk_proj_points',
                           'DK salary': 'dk_salary'},
                  inplace=True)

        # first iteration, create a new df
        if i == 0:
            all_df = df
        # subsequent iterations, append to it
        else:
            all_df = all_df.append(df)
        
    conn = DBClient()
    conn.load(all_df, 'salaries', if_exists='replace', schema='raw')
    

if __name__ == '__main__':
    logger = get_root_logger()
    get_header(logger, 'LOADING DRAFT KINGS SALARIES')
    main()