def test_image_all_1000s():
    tester = ThousandsTest(None)
    image = FakeImage()
    image.data += 5
    image.data[:, :] = 1000
    image = tester.do_stage(image)
    assert image is None


def test_crosstalk(set_random_seed):
    tester = CrosstalkCorrector(None)
    nx = 101
    ny = 103
    image = FakeImage(nx=nx, ny=ny)
    image.data = np.ones((4, ny, nx)) * 1000.0

    # Add random pixels at 10000 to each of the extensions
    for amp in range(4):
        random_pixels_x = np.random.randint(0, nx - 1, size=int(0.05 * nx * ny))
        random_pixels_y = np.random.randint(0, ny - 1, size=int(0.05 * nx * ny))
        for i in zip(random_pixels_y, random_pixels_x):
            image.data[amp][i] = 10000

    expected_image_data = image.data.copy()

    # Simulate crosstalk
    original_data = image.data.copy()
    for j in range(4):
        for i in range(4):
            if i != j:
                crosstalk_coeff = np.random.uniform(0.0, 0.01)
                image.header['CRSTLK{i}{j}'.format(i=i + 1, j=j + 1)] = crosstalk_coeff
                image.data[j] += original_data[i] * crosstalk_coeff

    # Try to remove it
    image = tester.do_stage(image)

    # Assert that we got back the original image
    np.testing.assert_allclose(image.data, expected_image_data, atol=2.0, rtol=1e-5)


def test_no_pixels_saturated():
    tester = SaturationTest(None)
    image = FakeImage()
    image.header['SATURATE'] = 65535
    image = tester.do_stage(image)
    assert image is not None
    assert image.header.get('SATFRAC') == 0.0


def test_get_inner_quarter_default():
    test_image = FakeImage()
    test_image.data = np.random.randint(0, 1000, size=test_image.data.shape)

    # get the inner quarter manually
    inner_nx = round(test_image.nx * 0.25)
    inner_ny = round(test_image.ny * 0.25)
    inner_quarter = test_image.data[inner_ny:-inner_ny, inner_nx:-inner_nx]

    np.testing.assert_array_equal(test_image.get_inner_image_section(), inner_quarter)


def test_pattern_noise_on_2d_image(set_random_seed):
    image = FakeImage()
    image.data = generate_data(has_pattern_noise=True)
    detector = pattern_noise.PatternNoiseDetector(None)
    logger.error = mock.MagicMock()
    detector.do_stage(image)
    assert logger.error.called


def test_no_offset():
    tester = PointingTest(None)
    image = FakeImage()
    image.header['CRVAL1'] = '1.0'
    image.header['CRVAL2'] = '-1.0'
    image.header['OFST-RA'] = '0:04:00.00'
    image.header['OFST-DEC'] = '-01:00:00.000'
    image = tester.do_stage(image)
    np.testing.assert_allclose(image.header.get('PNTOFST'), 0.0, atol=1e-7)


def test_bias_master_level_subtraction_is_reasonable(set_random_seed):
    input_bias = 2000.0
    read_noise = 15.0
    subtractor = BiasMasterLevelSubtractor(None)
    image = FakeImage()
    image.data = np.random.normal(input_bias, read_noise, size=(image.ny, image.nx))
    image = subtractor.do_stage(image)

    np.testing.assert_allclose(np.zeros(image.data.shape), image.data, atol=8 * read_noise)
    np.testing.assert_allclose(image.header.get('BIASLVL'), input_bias, atol=1.0)


def test_dark_normalization_is_reasonable(set_random_seed):
    nx = 101
    ny = 103
    normalizer = DarkNormalizer(None)
    data = np.random.normal(30.0, 10, size=(ny, nx))
    image = FakeImage()
    image.data = data.copy()
    image = normalizer.do_stage(image)
    np.testing.assert_allclose(image.data, data / image.exptime, 1e-5)


def test_image_30_percent_1000(set_random_seed):
    tester = ThousandsTest(None)
    nx = 101
    ny = 103
    image = FakeImage(nx=nx, ny=ny)

    random_pixels_x = np.random.randint(0, nx - 1, size=int(0.3 * nx * ny))
    random_pixels_y = np.random.randint(0, ny - 1, size=int(0.3 * nx * ny))
    for i in zip(random_pixels_y, random_pixels_x):
        image.data[i] = 1000

    image = tester.do_stage(image)
    assert image is None


def test_large_offset():
    tester = PointingTest(None)
    image = FakeImage()
    image.header['CRVAL1'] = '00:00:00.000'
    image.header['CRVAL2'] = '-00:00:00.000'
    image.header['OFST-RA'] = '00:00:00.000'
    image.header['OFST-DEC'] = '-00:00:10.000'
    image = tester.do_stage(image)
    assert image.header.get('PNTOFST') == 10.0


def test_pattern_noise_on_3d_image(mock_save_qc):
    data = 100.0 * np.sin(np.arange(1000000 * 4) / 0.1) + 1000.0 + np.random.normal(0.0, 10.0, size=1000000 * 4)
    data = data.reshape(4, 1000, 1000)
    image = FakeImage()
    image.data = data
    detector = pattern_noise.PatternNoiseDetector(None)
    _ = detector.do_stage([image])
    assert mock_save_qc.called


def test_pattern_noise_in_only_one_quadrant(mock_save_qc):
    data = np.random.normal(0.0, 10.0, size=1000000 * 4) + 1000.0
    data = data.reshape(4, 1000, 1000)
    data[3] += 100.0 * np.sin(np.arange(1e6) / 0.1).reshape(1000, 1000)
    image = FakeImage()
    image.data = data
    detector = pattern_noise.PatternNoiseDetector(None)
    _ = detector.do_stage([image])
    assert mock_save_qc.called


def test_image_10_percent_saturated_rejected(set_random_seed):
    tester = SaturationTest(None)
    nx = 101
    ny = 103
    image = FakeImage(nx=nx, ny=ny)
    image.header['SATURATE'] = 65535

    random_pixels_x = np.random.randint(0, nx - 1, size=int(0.1 * nx * ny))
    random_pixels_y = np.random.randint(0, ny - 1, size=int(0.1 * nx * ny))
    for i in zip(random_pixels_y, random_pixels_x):
        image.data[i] = image.header['SATURATE']

    image = tester.do_stage(image)
    assert image is None


def test_does_not_reject_noisy_image(mock_cal, mock_frame, set_random_seed):
    mock_cal.return_value = 'test.fits'
    master_readnoise = 3.0
    nx = 101
    ny = 103

    context = make_context_with_master_bias(readnoise=master_readnoise, nx=nx, ny=ny)
    comparer = BiasComparer(context)
    image = FakeImage(image_multiplier=0.0)
    image.data = np.random.normal(0.0, image.readnoise, size=(ny, nx))

    image = comparer.do_stage(image)

    assert image.is_bad is False


def test_saturation_es_update():
    # Set initial values
    stage = Stage(FakeElasticsearchContext())
    image = FakeImage()
    stage.save_qc_results({'Saturated': True, 'saturation_fraction': 0.99}, image)

    # Run saturation test
    tester = SaturationTest(FakeElasticsearchContext())
    image.header['SATURATE'] = 65535
    image.data += 5.0
    tester.do_stage([image])

    # Check info from elasticsearch
    results = elasticsearch.Elasticsearch(ES_URL).get_source(index=stage.ES_INDEX,
                                                             doc_type=stage.ES_DOC_TYPE,
                                                             id='test')
    assert not results['Saturated']
    assert results['saturation_fraction'] == 0.0


def test_image_2_percent_saturated(set_random_seed):
    tester = SaturationTest(None)
    nx = 101
    ny = 103
    image = FakeImage(nx=nx, ny=ny)
    image.header['SATURATE'] = 65535

    random_pixels_x = np.random.randint(0, nx - 1, size=int(0.02 * nx * ny))
    random_pixels_y = np.random.randint(0, ny - 1, size=int(0.02 * nx * ny))
    for i in zip(random_pixels_y, random_pixels_x):
        image.data[i] = image.header['SATURATE']

    image = tester.do_stage(image)
    assert image is not None
    assert np.abs(image.header.get('SATFRAC') - 0.02) < 0.001


def test_raises_exception_if_no_master_calibration(mock_cal, mock_images):
    mock_cal.return_value = None
    mock_images.return_value = FakeBiasImage()
    subtractor = BiasSubtractor(None)
    with pytest.raises(MasterCalibrationDoesNotExist):
        images = subtractor.do_stage([FakeImage() for x in range(6)])


def test_save_results():
    stage = Stage(FakeElasticsearchContext())
    image = FakeImage()
    output = stage.save_qc_results({}, image, _source=True)
    assert '_shards' in output
    assert output['_shards']['failed'] == 0
    assert 'get' in output


def test_save_qc_results(mock_es):
    context = FakeContext()
    context.post_to_elasticsearch = True
    context.elasticsearch_url = '/'
    stage = FakeStage(context)
    qc.save_qc_results(stage.runtime_context, {}, FakeImage())

    assert mock_es.called


def test_crosstalk():
    tester = CrosstalkCorrector(None)
    nx = 101
    ny = 103
    images = [FakeImage(nx=nx, ny=ny) for x in range(6)]

    # Add random pixels at 10000 to each of the extensions
    for image in images:
        image.data = np.ones((4, ny, nx)) * 1000.0
        for amp in range(4):
            random_pixels_x = np.random.randint(0, nx - 1, size=int(0.05 * nx * ny))
            random_pixels_y = np.random.randint(0, ny - 1, size=int(0.05 * nx * ny))
            for i in zip(random_pixels_y, random_pixels_x):
                image.data[amp][i] = 10000

    expected_image_data = [image.data.copy() for image in images]

    # Simulate crosstalk
    for image in images:
        original_data = image.data.copy()
        for j in range(4):
            for i in range(4):
                if i != j:
                    crosstalk_coeff = np.random.uniform(0.0, 0.01)
                    image.header['CRSTLK{i}{j}'.format(i=i + 1, j=j + 1)] = crosstalk_coeff
                    image.data[j] += original_data[i] * crosstalk_coeff

    # Try to remove it
    images = tester.do_stage(images)

    # Assert that we got back the original image
    for i, image in enumerate(images):
        np.testing.assert_allclose(image.data, expected_image_data[i], atol=2.0, rtol=1e-5)


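# A minimal illustrative sketch, not the CrosstalkCorrector implementation: the crosstalk
# simulated in the tests above is a linear mix of the quadrants, so one way to undo it is
# to build the 4x4 mixing matrix from the CRSTLK{i}{j} header keywords (the convention used
# above) and solve the linear system for every pixel. `remove_crosstalk_sketch` and its
# arguments are hypothetical names introduced only for this illustration.
def remove_crosstalk_sketch(data, header, n_amps=4):
    # measured[j] = true[j] + sum over i != j of coeff_ij * true[i]
    mixing = np.eye(n_amps)
    for j in range(n_amps):
        for i in range(n_amps):
            if i != j:
                mixing[j, i] = header.get('CRSTLK{i}{j}'.format(i=i + 1, j=j + 1), 0.0)
    # Solve the 4x4 system for all pixels at once
    flattened = data.reshape(n_amps, -1)
    corrected = np.linalg.solve(mixing, flattened)
    return corrected.reshape(data.shape)

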
def test_format_qc_results_new_info():
    filename, results = qc.format_qc_results({"key1": "value1",
                                              "key2": "value2"},
                                             FakeImage())
    assert results["key1"] == "value1"
    assert results["key2"] == "value2"


def test_format_qc_results_numpy_bool():
    filename, results = qc.format_qc_results({"normal_bool": True,
                                              "numpy_bool": np.bool_(True)},
                                             FakeImage())
    assert type(results["normal_bool"]) == bool
    assert type(results["numpy_bool"]) == bool


def test_flat_normalization_is_reasonable(set_random_seed):
    flat_variation = 0.05
    input_level = 10000.0
    nx = 101
    ny = 103

    normalizer = FlatNormalizer(None)
    image = FakeImage()
    flat_pattern = np.random.normal(1.0, flat_variation, size=(ny, nx))
    image.data = np.random.poisson(flat_pattern * input_level).astype(float)
    image = normalizer.do_stage(image)

    # For right now, we only use a quarter of the image to calculate the flat normalization
    # because real ccds have crazy stuff at the edges, so the S/N is cut down by a factor of 2
    # Assume 50% slop because the variation in the pattern does not decrease like sqrt(n)
    assert np.abs(image.header['FLATLVL'] - input_level) < (3.0 * flat_variation * input_level / (nx * ny) ** 0.5)
    assert np.abs(np.mean(image.data) - 1.0) <= 3.0 * flat_variation / (nx * ny) ** 0.5


def test_save_results_with_changed_parameters():
    # Push the default test image info
    stage = Stage(FakeElasticsearchContext())
    image = FakeImage()
    stage.save_qc_results({}, image)

    # Change the image parameters and push again
    image.site = "fake_site"
    image.instrument = "fake_instrument"
    image.epoch = str(int(image.epoch) + 1)
    image.dateobs += timedelta(days=1)

    # Post to ES and test results
    output = stage.save_qc_results({}, image, _source=True)
    assert output['result'] == 'updated'

    results = output['get']['_source']
    assert results['site'] == image.site
    assert results['instrument'] == image.instrument
    assert results['dayobs'] == image.epoch
    assert results['timestamp'] == image.dateobs.strftime("%Y-%m-%dT%H:%M:%S")


def test_does_flag_bad_image(mock_cal, mock_frame, set_random_seed):
    mock_cal.return_value = 'test.fits'
    master_readnoise = 3.0
    nx = 101
    ny = 103

    context = make_context_with_master_bias(readnoise=master_readnoise, nx=nx, ny=ny)
    comparer = BiasComparer(context)
    image = FakeImage(image_multiplier=0.0)
    image.data = np.random.normal(0.0, image.readnoise, size=(ny, nx))
    x_indexes = np.random.choice(np.arange(nx), size=2000)
    y_indexes = np.random.choice(np.arange(ny), size=2000)
    for x, y in zip(x_indexes, y_indexes):
        image.data[y, x] = np.random.normal(100, image.readnoise)

    image = comparer.do_stage(image)

    assert image.is_bad


def test_flags_bad_if_no_master_calibration(mock_cal, mock_frame, set_random_seed):
    mock_cal.return_value = None
    nx = 101
    ny = 103
    context = make_context_with_master_bias(nx=nx, ny=ny)
    comparer = BiasComparer(context)
    image = comparer.do_stage(FakeImage(nx=nx, ny=ny))
    assert image.is_bad is True


def test_format_qc_results_basic_info():
    image = FakeImage()
    filename, results = qc.format_qc_results({}, image)

    assert results['site'] == image.site
    assert results['instrument'] == image.camera
    assert results['dayobs'] == image.epoch
    assert results['@timestamp'] == image.dateobs
    assert results['obstype'] == image.obstype
    assert filename in image.filename


def test_no_pixels_1000():
    tester = ThousandsTest(None)
    nx = 101
    ny = 103
    images = [FakeImage(nx=nx, ny=ny) for x in range(6)]
    images = tester.do_stage(images)
    assert len(images) == 6


def test_header_checker_es_update():
    # Set initial values
    stage = Stage(FakeElasticsearchContext())
    image = FakeImage()
    header_checks = ['HeaderBadDecValue', 'HeaderBadRAValue', 'HeaderExptimeNegative',
                     'HeaderExptimeZero', 'HeaderKeywordsMissing', 'HeaderKeywordsNA']
    header_check_booleans = {key: True for key in header_checks}
    stage.save_qc_results(header_check_booleans, image)

    # Run header sanity test
    tester = HeaderSanity(FakeElasticsearchContext())
    for key in tester.header_expected_format.keys():
        image.header[key] = 1.0
    image.header['OBSTYPE'] = 'EXPOSE'
    tester.do_stage([image])

    # Check info from elasticsearch
    results = elasticsearch.Elasticsearch(ES_URL).get_source(index=stage.ES_INDEX,
                                                             doc_type=stage.ES_DOC_TYPE,
                                                             id='test')
    for header_check in header_checks:
        assert not results[header_check]


def test_pointing_es_update():
    # Set initial values
    stage = Stage(FakeElasticsearchContext())
    image = FakeImage()
    stage.save_qc_results({'PointingSevere': True, 'PointingWarning': True,
                           'pointing_offset': 100.}, image)

    # Run pointing test
    image.header['CRVAL1'] = '1.0'
    image.header['CRVAL2'] = '-1.0'
    image.header['OFST-RA'] = '0:04:00.00'
    image.header['OFST-DEC'] = '-01:00:00.000'
    tester = PointingTest(FakeElasticsearchContext())
    tester.do_stage([image])

    # Check info from elasticsearch
    results = elasticsearch.Elasticsearch(ES_URL).get_source(index=stage.ES_INDEX,
                                                             doc_type=stage.ES_DOC_TYPE,
                                                             id='test')
    assert not results['PointingSevere']
    assert not results['PointingWarning']
    assert results['pointing_offset'] < 1E10


def test_nonzero_but_no_pixels_1000():
    tester = ThousandsTest(None)
    nx = 101
    ny = 103
    images = [FakeImage(nx=nx, ny=ny) for x in range(6)]
    for image in images:
        image.data += 5
    images = tester.do_stage(images)
    assert len(images) == 6


def test_pattern_noise_es_update():
    # Set initial values
    stage = Stage(FakeElasticsearchContext())
    image = FakeImage()
    stage.save_qc_results({'PatternNoise': True}, image)

    # Run pattern noise test
    tester = PatternNoiseDetector(FakeElasticsearchContext())
    tester.do_stage([image])

    # Check info from elasticsearch
    results = elasticsearch.Elasticsearch(ES_URL).get_source(index=stage.ES_INDEX,
                                                             doc_type=stage.ES_DOC_TYPE,
                                                             id='test')
    assert not results['PatternNoise']


def test_all_images_all_1000s():
    tester = ThousandsTest(None)
    nx = 101
    ny = 103
    images = [FakeImage(nx=nx, ny=ny) for x in range(6)]
    for image in images:
        image.data[:, :] = 1000
    images = tester.do_stage(images)
    assert len(images) == 0


def test_sinistro_1000s_es_update():
    # Set initial values
    stage = Stage(FakeElasticsearchContext())
    image = FakeImage()
    stage.save_qc_results({'Error1000s': True, 'fraction_1000s': 0.99}, image)

    # Run sinistro 1000s test
    tester = ThousandsTest(FakeElasticsearchContext())
    tester.do_stage([image])

    # Check info from elasticsearch
    results = elasticsearch.Elasticsearch(ES_URL).get_source(index=stage.ES_INDEX,
                                                             doc_type=stage.ES_DOC_TYPE,
                                                             id='test')
    assert not results['Error1000s']
    assert results['fraction_1000s'] == 0.0


def test_no_pixels_saturated():
    tester = SaturationTest(None)
    nx = 101
    ny = 103
    images = [FakeImage(nx=nx, ny=ny) for x in range(6)]
    for image in images:
        image.header['SATURATE'] = 65535
    images = tester.do_stage(images)
    for image in images:
        assert image.header['SATFRAC'][0] == 0.0
    assert len(images) == 6


def test_2d_is_not_3d():
    test_image = FakeImage()
    assert not test_image.data_is_3d()


def test_3d_is_3d():
    test_image = FakeImage(n_amps=4)
    assert test_image.data_is_3d()


def test_get_n_amps_2d():
    test_image = FakeImage()
    assert test_image.get_n_amps() == 1


def test_get_n_amps_3d():
    n_amps = 4
    test_image = FakeImage(n_amps=n_amps)
    assert test_image.get_n_amps() == n_amps


def test_get_inner_image_section_3d():
    test_image = FakeImage(n_amps=4)
    with pytest.raises(ValueError):
        test_image.get_inner_image_section()