def test_remove_obsolete_files(self, app, caplog, cleanup_s3):
    """Removes files from S3 following prefix and whitelist rules."""
    caplog.set_level(logging.INFO)
    with capture_app_logs(app):
        prefix1 = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/001'
        prefix2 = app.config['LOCH_S3_PREFIX_TESTEXT'] + '/002'
        assert s3.upload_from_url('http://shakespeare.mit.edu/Poetry/sonnet.XX.html', prefix1 + '/xx/sonnet-xx.html')
        assert s3.upload_from_url('http://shakespeare.mit.edu/Poetry/sonnet.XXI.html', prefix1 + '/xxi/sonnet-xxi.html')
        assert s3.upload_from_url('http://shakespeare.mit.edu/Poetry/sonnet.XXII.html', prefix1 + '/xxii/sonnet-xxii.html')
        assert s3.upload_from_url('http://shakespeare.mit.edu/Poetry/sonnet.XLV.html', prefix2 + '/xlv/sonnet-xlv.html')

        whitelist = ['sonnet-xxi.html', 'sonnet-xxii.html']
        assert s3.delete_objects_with_prefix(prefix1, whitelist) is True

        assert f'3 key(s) matching prefix "{prefix1}"' in caplog.text
        assert '2 key(s) in whitelist' in caplog.text
        assert 'will delete 1 object(s)' in caplog.text
        assert s3.object_exists(prefix1 + '/xx/sonnet-xx.html') is False
        assert s3.object_exists(prefix1 + '/xxi/sonnet-xxi.html') is True
        assert s3.object_exists(prefix1 + '/xxii/sonnet-xxii.html') is True
        assert s3.object_exists(prefix2 + '/xlv/sonnet-xlv.html') is True
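
# The s3.delete_objects_with_prefix() helper exercised by the test above is not shown in
# this section. What follows is a minimal sketch of what such a helper might look like on
# top of boto3, assuming a LOCH_S3_BUCKET config key, the Flask `app` object used by the
# surrounding code, and log wording modeled on the test's assertions. The names and
# internals here are assumptions for illustration, not the actual implementation.

import boto3


def delete_objects_with_prefix(prefix, whitelist=()):
    client = boto3.client('s3')
    bucket = app.config['LOCH_S3_BUCKET']
    # Collect every key under the prefix, paginating past the 1000-key page limit.
    keys = []
    paginator = client.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        keys += [obj['Key'] for obj in page.get('Contents', [])]
    # Keep any object whose trailing filename appears in the whitelist.
    keys_to_delete = [k for k in keys if k.split('/')[-1] not in whitelist]
    app.logger.info(
        f'Found {len(keys)} key(s) matching prefix "{prefix}", {len(whitelist)} key(s) in whitelist, '
        f'will delete {len(keys_to_delete)} object(s)',
    )
    # delete_objects accepts at most 1000 keys per call, so delete in chunks.
    for i in range(0, len(keys_to_delete), 1000):
        response = client.delete_objects(
            Bucket=bucket,
            Delete={'Objects': [{'Key': k} for k in keys_to_delete[i:i + 1000]]},
        )
        if response.get('Errors'):
            return False
    return True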
def run(self, cleanup=True):
    job_id = self.generate_job_id()
    app.logger.info(f'Starting Canvas snapshot sync job... (id={job_id})')
    snapshot_response = canvas_data.get_snapshots()
    if not snapshot_response:
        raise BackgroundJobError('Error retrieving Canvas data snapshots, aborting job.')
    snapshots = snapshot_response.get('files', [])

    def should_sync(snapshot):
        # Sync only complete (non-partial) snapshots of the requests table.
        return snapshot['table'] == 'requests' and snapshot['partial'] is False

    snapshots_to_sync = [s for s in snapshots if should_sync(s)]
    app.logger.info(f'Will sync {len(snapshots_to_sync)} of {len(snapshots)} available files from Canvas Data.')

    success = 0
    failure = 0

    for snapshot in snapshots_to_sync:
        metadata.create_canvas_sync_status(
            job_id=job_id,
            filename=snapshot['filename'],
            canvas_table=snapshot['table'],
            source_url=snapshot['url'],
        )
        key_components = [app.config['LOCH_S3_CANVAS_DATA_PATH_HISTORICAL'], snapshot['table'], snapshot['filename']]
        key = '/'.join(key_components)
        response = dispatch('sync_file_to_s3', data={'canvas_sync_job_id': job_id, 'url': snapshot['url'], 'key': key})
        if not response:
            app.logger.error('Failed to dispatch S3 sync of snapshot ' + snapshot['filename'])
            metadata.update_canvas_sync_status(job_id, key, 'error', details=f'Failed to dispatch: {response}')
            failure += 1
        else:
            app.logger.info('Dispatched S3 sync of snapshot ' + snapshot['filename'])
            success += 1

    if cleanup:
        # Any file under the requests prefix that is not in the current snapshot set is obsolete.
        app.logger.info('Will remove obsolete snapshots from S3.')
        current_snapshot_filenames = [s['filename'] for s in snapshots_to_sync]
        requests_prefix = app.config['LOCH_S3_CANVAS_DATA_PATH_HISTORICAL'] + '/requests'
        delete_result = s3.delete_objects_with_prefix(requests_prefix, whitelist=current_snapshot_filenames)
        if not delete_result:
            app.logger.error('Cleanup of obsolete snapshots failed.')

    return f'Canvas snapshot sync job dispatched to workers ({success} successful dispatches, {failure} failures).'
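
# The dispatch() call above hands each snapshot off to a worker node rather than
# downloading it in-process. A minimal sketch of what such a helper might look like,
# assuming workers expose an HTTP endpoint; the WORKER_HOST config key, the
# /api/job/<name> route, and the JSON payload shape are all assumptions for illustration.

import requests


def dispatch(job_name, data=None):
    url = f"http://{app.config['WORKER_HOST']}/api/job/{job_name}"
    try:
        # Workers run the job asynchronously; a 2xx response means the dispatch was accepted.
        response = requests.post(url, json=data, timeout=10)
        response.raise_for_status()
        return response.json()
    except requests.RequestException:
        # A falsy return signals dispatch failure to the caller.
        return False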
def run(self, cleanup=True):
    job_id = self.generate_job_id()
    app.logger.info(f'Starting Canvas snapshot sync job... (id={job_id})')
    snapshot_response = canvas_data.get_snapshots()
    if not snapshot_response:
        raise BackgroundJobError('Error retrieving Canvas data snapshots, aborting job.')
    snapshots = snapshot_response.get('files', [])

    def should_sync(snapshot):
        # For tables other than requests, sync all snapshots.
        # For the requests table, sync snapshots that are partial or later than the configured cutoff date.
        def after_cutoff_date(url):
            match = re.search(r'requests/(20\d{6})', url)
            return match is not None and (match[1] >= app.config['LOCH_CANVAS_DATA_REQUESTS_CUTOFF_DATE'])
        return snapshot['table'] != 'requests' or snapshot['partial'] is True or after_cutoff_date(snapshot['url'])

    snapshots_to_sync = [s for s in snapshots if should_sync(s)]
    app.logger.info(f'Will sync {len(snapshots_to_sync)} of {len(snapshots)} available files from Canvas Data.')

    success = 0
    failure = 0

    for snapshot in snapshots_to_sync:
        metadata.create_canvas_sync_status(
            job_id=job_id,
            filename=snapshot['filename'],
            canvas_table=snapshot['table'],
            source_url=snapshot['url'],
        )
        # Requests snapshots land under the current term's path; all other tables go to the daily path.
        if snapshot['table'] == 'requests':
            key_components = [berkeley.s3_canvas_data_path_current_term(), snapshot['table'], snapshot['filename']]
        else:
            key_components = [get_s3_canvas_daily_path(), snapshot['table'], snapshot['filename']]
        key = '/'.join(key_components)
        response = dispatch('sync_file_to_s3', data={'canvas_sync_job_id': job_id, 'url': snapshot['url'], 'key': key})
        if not response:
            app.logger.error('Failed to dispatch S3 sync of snapshot ' + snapshot['filename'])
            metadata.update_canvas_sync_status(job_id, key, 'error', details=f'Failed to dispatch: {response}')
            failure += 1
        else:
            app.logger.info('Dispatched S3 sync of snapshot ' + snapshot['filename'])
            success += 1

    if cleanup:
        app.logger.info('Will remove obsolete snapshots from S3.')
        current_snapshot_filenames = [s['filename'] for s in snapshots_to_sync]
        requests_prefix = berkeley.s3_canvas_data_path_current_term() + '/requests'
        delete_result = s3.delete_objects_with_prefix(requests_prefix, whitelist=current_snapshot_filenames)
        if not delete_result:
            app.logger.error('Cleanup of obsolete snapshots failed.')

    return f'Canvas snapshot sync job dispatched to workers ({success} successful dispatches, {failure} failures).'
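
# The after_cutoff_date() check above compares date strings with >=, which is safe because
# zero-padded YYYYMMDD strings sort lexicographically in chronological order. A quick
# self-contained illustration; the cutoff value and URLs here are hypothetical.

import re


def after_cutoff_date(url, cutoff='20180101'):
    match = re.search(r'requests/(20\d{6})', url)
    return match is not None and match[1] >= cutoff


assert after_cutoff_date('https://example.com/requests/20180401/part-00000.gz') is True
assert after_cutoff_date('https://example.com/requests/20171231/part-00000.gz') is False
# URLs with no timestamped requests segment never pass the cutoff check.
assert after_cutoff_date('https://example.com/requests/untimestamped.gz') is False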