def _clean_subdirs(parent):
    """Remove every subdirectory of *parent*, leaving files untouched."""
    for entry in os.listdir(parent):
        # os.listdir yields bare names; join with the parent so the
        # isdir/rmtree calls don't resolve against the current working dir.
        full_path = os.path.join(parent, entry)
        if os.path.isdir(full_path):
            shutil.rmtree(full_path)


def test_aws_worker():
    """End-to-end AWS worker lifecycle: setup -> launch -> collect -> teardown.

    Requires a local ``config.yml`` next to this test file (real AWS
    credentials/config); skipped otherwise.
    """
    if not os.path.isfile(os.path.join(HERE, 'config.yml')):
        pytest.skip("Only for local tests for now")
    ramp_kit_dir = os.path.join(HERE, 'kits', 'iris')
    # make sure prediction and log dirs exist, if not, add them
    add_empty_dir(os.path.join(ramp_kit_dir, 'predictions'))
    add_empty_dir(os.path.join(ramp_kit_dir, 'logs'))
    # if prediction / log artifacts from a previous run are still
    # there, remove them so this run starts from a clean state
    _clean_subdirs(os.path.join(ramp_kit_dir, 'predictions'))
    _clean_subdirs(os.path.join(ramp_kit_dir, 'logs'))

    config = read_config(os.path.join(HERE, 'config.yml'))
    worker_config = generate_worker_config(config)
    worker = AWSWorker(worker_config, submission='starting_kit_local')

    # walk the worker through its full state machine
    worker.setup()
    assert worker.status == 'setup'
    worker.launch_submission()
    assert worker.status in ('running', 'finished')
    worker.collect_results()
    assert worker.status == 'collected'
    # collected artifacts must land in the kit's predictions/logs dirs
    assert os.path.isdir(
        os.path.join(ramp_kit_dir, 'predictions', 'starting_kit_local',
                     'fold_0'))
    assert os.path.isfile(
        os.path.join(ramp_kit_dir, 'logs', 'starting_kit_local', 'log'))
    worker.teardown()
    assert worker.status == 'killed'
def test_aws_worker_download_log_error(superclass, test_rsync, caplog):
    """Log download failures during collect_results are logged, retried,
    and surfaced as an error exit status."""

    # Stand-in for a boto EC2 instance; only ``id`` is ever read.
    class FakeInstance:
        id = 'test'

    # Make every rsync attempt fail as if ssh returned exit code 255.
    test_rsync.side_effect = subprocess.CalledProcessError(255, 'test')
    superclass.return_value = True

    # Build a worker that believes its submission already finished.
    event_config = read_config(ramp_aws_config_template())['worker']
    worker = AWSWorker(event_config, submission='starting_kit_local')
    worker.config = event_config
    worker.status = 'finished'
    worker.instance = FakeInstance

    # collect_results will now raise a CalledProcessError internally,
    # which the worker should catch, log, retry once, and report.
    exit_status, error_msg = worker.collect_results()

    assert 'Error occurred when downloading the logs' in caplog.text
    assert 'Trying to download the log once again' in caplog.text
    assert exit_status == 2
    assert 'test' in error_msg
    assert worker.status == 'error'