def test_get_asc_feed(self, app):
    # The fixture-backed feed is expected to yield nine athlete rows,
    # the first stamped with SyncDate '2018-01-31'.
    rows = asc_athletes_api.get_asc_feed()
    assert len(rows) == 9
    assert rows[0]['SyncDate'] == '2018-01-31'
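# Illustrative sketch only: the test above presumably relies on a fixture to
# stub out the live ASC endpoint. One way to do that with pytest and
# unittest.mock is shown below; the fixture name and the canned payload are
# assumptions, not the project's actual test setup. asc_athletes_api is the
# same module the test references.
import pytest
from unittest import mock

@pytest.fixture
def canned_asc_feed():
    # Nine rows sharing one SyncDate, mirroring the assertions above.
    # The SID values are made up for illustration.
    fake_rows = [{'SID': str(11667051 + i), 'SyncDate': '2018-01-31'} for i in range(9)]
    with mock.patch.object(asc_athletes_api, 'get_asc_feed', return_value=fake_rows):
        yield fake_rows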
def run(self):
    app.logger.info('ASC import: Fetch team and student athlete data from ASC API')
    # get_asc_feed returns either a list of feed rows or a dict with an 'error' key.
    api_results = get_asc_feed()
    if 'error' in api_results:
        raise BackgroundJobError('ASC import: Error from external API: {}'.format(api_results['error']))
    elif not api_results:
        raise BackgroundJobError('ASC import: API returned zero students')
    # Every row in a single feed pull must carry the same SyncDate; a mismatch
    # between the first and last rows means the feed is inconsistent.
    sync_date = api_results[0]['SyncDate']
    if sync_date != api_results[-1]['SyncDate']:
        raise BackgroundJobError(
            f'ASC import: SyncDate conflict in ASC API: {api_results[0]} vs. {api_results[-1]}',
        )
    rows = []
    for r in api_results:
        # Keep only current-academic-year rows that name a sport.
        if r['AcadYr'] == app.config['ASC_THIS_ACAD_YR'] and r['SportCode']:
            asc_code = r['SportCodeCore']
            if asc_code in SPORT_TRANSLATIONS:
                group_code = r['SportCode']
                data = [
                    r['SID'],
                    str(r.get('ActiveYN', 'No') == 'Yes'),
                    str(r.get('IntensiveYN', 'No') == 'Yes'),
                    r.get('SportStatus', ''),
                    group_code,
                    _unambiguous_group_name(r['Sport'], group_code),
                    SPORT_TRANSLATIONS[asc_code],
                    r['SportCore'],
                ]
                rows.append(encoded_tsv_row(data))
            else:
                sid = r['SID']
                app.logger.error(f'ASC import: Unmapped asc_code {asc_code} has ActiveYN for sid={sid}')
    # Stage the TSV rows in S3, then COPY them into Redshift.
    s3_key = f'{get_s3_asc_daily_path()}/asc_api_raw_response_{sync_date}.tsv'
    if not s3.upload_tsv_rows(rows, s3_key):
        raise BackgroundJobError('Error on S3 upload: aborting job.')
    app.logger.info('Copy data in S3 file to Redshift...')
    query = resolve_sql_template_string(
        """
        TRUNCATE {redshift_schema_asc}.students;
        COPY {redshift_schema_asc}.students
            FROM 's3://{s3_bucket}/{s3_key}'
            IAM_ROLE '{redshift_iam_role}'
            DELIMITER '\\t';
        """,
        s3_bucket=app.config['LOCH_S3_BUCKET'],
        s3_key=s3_key,
    )
    if not redshift.execute(query):
        raise BackgroundJobError('Error on Redshift copy: aborting job.')
    status = {
        'this_sync_date': sync_date,
        'api_results_count': len(api_results),
    }
    app.logger.info(f'ASC import: Successfully completed import job: {str(status)}')
    return status
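# The row-building loop in run() depends on two helpers not shown in this
# section. The sketches below are hedged guesses at their behavior, not the
# project's actual implementations: encoded_tsv_row presumably stringifies
# each field, joins with tabs, and encodes to bytes for s3.upload_tsv_rows;
# _unambiguous_group_name presumably disambiguates ASC sport names that
# repeat across team codes.

def encoded_tsv_row(elements):
    # Assumption: None becomes an empty field; everything else is str()'d.
    return '\t'.join('' if e is None else str(e) for e in elements).encode()

def _unambiguous_group_name(asc_sport_name, group_code):
    # Assumption: qualify the bare sport name with its group code so squads
    # that share one ASC sport name stay distinguishable.
    return f'{asc_sport_name} ({group_code})'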