Beispiel #1
0
    def test_can_preprocess_standard_light_curve_with_passed_functions(self):
        """The standard preprocessor should route caller-supplied loading functions into the example and label."""
        database = StandardAndInjectedLightCurveDatabase()
        # Neutralize the real preprocessing so only the data plumbing is under test.
        database.preprocess_light_curve = lambda light_curve, *args, **kwargs: light_curve
        stub_load_data_function = Mock(return_value=(np.array([0, -1, -2]), np.array([0, 1, 2]), None))
        stub_load_label_function = Mock(return_value=3)
        stub_load_auxiliary_function = lambda path: np.array([], dtype=np.float32)
        light_curve_path_tensor = tf.constant('stub_path.fits')

        # noinspection PyTypeChecker
        example, label = database.preprocess_standard_light_curve(
            load_times_fluxes_and_flux_errors_from_path_function=stub_load_data_function,
            load_auxiliary_information_for_path_function=stub_load_auxiliary_function,
            load_label_from_path_function=stub_load_label_function,
            light_curve_path_tensor=light_curve_path_tensor)

        assert np.array_equal(example, [[0], [1], [2]])
        assert np.array_equal(label, [3])
Beispiel #2
0
 def test_database_has_light_curve_collection_properties(self):
     """A freshly constructed database should expose every collection attribute the pipeline relies on."""
     database = StandardAndInjectedLightCurveDatabase()
     expected_attribute_names = [
         'training_standard_light_curve_collections',
         'training_injectee_light_curve_collection',
         'training_injectable_light_curve_collections',
         'validation_standard_light_curve_collections',
         'validation_injectee_light_curve_collection',
         'validation_injectable_light_curve_collections',
     ]
     for attribute_name in expected_attribute_names:
         assert hasattr(database, attribute_name)
Beispiel #3
0
    def test_can_preprocess_injected_light_curve_with_passed_functions(self):
        """Injected-light-curve preprocessing should accept caller-provided load functions for both sides."""
        database = StandardAndInjectedLightCurveDatabase()
        # Neutralize preprocessing and signal injection so only the data flow is exercised.
        database.preprocess_light_curve = lambda light_curve, *args, **kwargs: light_curve
        database.inject_signal_into_light_curve = lambda light_curve, *args, **kwargs: light_curve
        shared_load_stub = Mock(return_value=(np.array([0, -1, -2]), np.array([0, 1, 2]), None))
        label_load_stub = Mock(return_value=3)
        shared_path_tensor = tf.constant('stub_path.fits')

        # noinspection PyTypeChecker
        example, label = database.preprocess_injected_light_curve(
            injectee_load_times_fluxes_and_flux_errors_from_path_function=shared_load_stub,
            injectable_load_times_magnifications_and_magnification_errors_from_path_function=shared_load_stub,
            load_label_from_path_function=label_load_stub,
            injectee_light_curve_path_tensor=shared_path_tensor,
            injectable_light_curve_path_tensor=shared_path_tensor)

        assert np.array_equal(example, [[0], [1], [2]])
        assert np.array_equal(label, [3])
Beispiel #4
0
 def test_grouping_from_light_curve_auxiliary_and_label_to_observation_and_label(self):
     """Zipped (light_curve, auxiliary, label) triples should regroup as ((light_curve, auxiliary), label)."""
     light_curves = tf.data.Dataset.from_tensor_slices([[0, 0], [2, 2], [4, 4]])
     auxiliaries = tf.data.Dataset.from_tensor_slices([[0], [20], [40]])
     labels = tf.data.Dataset.from_tensor_slices([[0], [-2], [-4]])
     zipped_dataset = tf.data.Dataset.zip((light_curves, auxiliaries, labels))
     database = StandardAndInjectedLightCurveDatabase()
     regrouped_dataset = database.from_light_curve_auxiliary_and_label_to_observation_and_label(zipped_dataset)
     iterator = iter(regrouped_dataset)
     first_element = next(iterator)
     assert np.array_equal(first_element[0][0], [0, 0])
     assert np.array_equal(first_element[0][1], [0])
     assert np.array_equal(first_element[1], [0])
     second_element = next(iterator)
     assert np.array_equal(second_element[0][0], [2, 2])
     assert np.array_equal(second_element[0][1], [20])
     assert np.array_equal(second_element[1], [-2])
Beispiel #5
0
 def test_can_specify_a_label_with_more_then_size_one_in_preprocessor(self):
     """Multi-valued labels should survive standard dataset generation unchanged."""
     # NOTE(review): "more_then" in the name is a typo for "more_than"; kept to preserve the test id.
     database = StandardAndInjectedLightCurveDatabase()
     database.number_of_parallel_processes_per_map = 1
     database.time_steps_per_example = 3
     database.number_of_label_values = 2
     expected_label = np.array([0, 1])

     def load_times_fluxes_and_flux_errors(path):
         return np.array([0, -1, -2]), np.array([0, 1, 2]), None

     def load_label(path):
         return expected_label

     def load_auxiliary_data(path):
         return np.array([], dtype=np.float32)

     paths_dataset = tf.data.Dataset.from_tensor_slices(['a.fits', 'b.fits'])
     dataset = database.generate_standard_light_curve_and_label_dataset(
         paths_dataset, load_times_fluxes_and_flux_errors, load_auxiliary_data, load_label)
     first_example_and_label = list(dataset)[0]
     assert np.array_equal(first_example_and_label[1], expected_label)
Beispiel #6
0
 def test_database_can_generate_training_and_validation_datasets_with_auxiliary_input(self):
     """Generated datasets should yield ((light_curve, auxiliary), label) batches with the expected shapes."""
     database = StandardAndInjectedLightCurveDatabase()
     collection = LightCurveCollection()
     collection.get_paths = lambda: [Path('path0.ext')]
     collection.load_times_and_fluxes_from_path = lambda path: (np.array([90, 100, 110]), np.array([0, 1, 2]))
     collection.label = 0
     collection.load_auxiliary_information_for_path = lambda path: np.array([3, 4])
     database.training_standard_light_curve_collections = [collection]
     database.validation_standard_light_curve_collections = [collection]
     # Disable the random preprocessing steps so the expected values are deterministic.
     database.remove_random_elements = lambda x: x
     database.randomly_roll_elements = lambda x: x
     database.normalize_on_percentiles = lambda fluxes: fluxes
     database.batch_size = 4
     database.time_steps_per_example = 3
     database.number_of_parallel_processes_per_map = 1
     database.number_of_auxiliary_values = 2
     training_dataset, validation_dataset = database.generate_datasets()
     training_observations, training_labels = next(iter(training_dataset))
     assert training_observations[0].shape == (database.batch_size, 3, 1)
     assert training_observations[1].shape == (database.batch_size, 2)
     assert training_labels.shape == (database.batch_size, 1)
     assert np.array_equal(training_observations[0][0].numpy(), [[0], [1], [2]])  # Light curve.
     assert np.array_equal(training_observations[1][0].numpy(), [3, 4])  # Auxiliary.
     assert np.array_equal(training_labels[0].numpy(), [0])  # Label.
     validation_observations, validation_labels = next(iter(validation_dataset))
     assert np.array_equal(validation_observations[0][0].numpy(), [[0], [1], [2]])  # Light curve.
     assert np.array_equal(validation_observations[1][0].numpy(), [3, 4])  # Auxiliary.
     assert np.array_equal(validation_labels[0].numpy(), [0])  # Label.
Beispiel #7
0
 def test_expand_label_to_training_dimensions(self, original_label, expected_label):
     """Parametrized: labels in various input forms should expand to exactly a NumPy ndarray."""
     expanded_label = StandardAndInjectedLightCurveDatabase().expand_label_to_training_dimensions(original_label)
     # Strict type check (not isinstance) is intentional: the result must be a plain ndarray.
     assert type(expanded_label) is np.ndarray
     assert np.array_equal(expanded_label, expected_label)
Beispiel #8
0
    def database_with_collections(self) -> StandardAndInjectedLightCurveDatabase:
        """A fixture of the database with light_curve collections pre-prepared."""
        database = StandardAndInjectedLightCurveDatabase()

        def _stub_fluxes_collection(path_name, times, fluxes, label):
            # Standard/injectee collection whose loaders return freshly built canned arrays.
            collection = LightCurveCollection()
            collection.get_paths = lambda: [Path(path_name)]
            collection.load_times_and_fluxes_from_path = lambda path: (np.array(times), np.array(fluxes))
            collection.label = label
            return collection

        def _stub_magnifications_collection(path_name, times, magnifications, label):
            # Injectable collection whose loaders return freshly built canned arrays.
            collection = LightCurveCollection()
            collection.get_paths = lambda: [Path(path_name)]
            collection.load_times_and_magnifications_from_path = lambda path: (
                np.array(times), np.array(magnifications))
            collection.label = label
            return collection

        # Set up mock light_curve collections.
        standard_collection0 = _stub_fluxes_collection('standard_path0.ext', [10, 20, 30], [0, 1, 2], 0)
        standard_collection1 = _stub_fluxes_collection('standard_path1.ext', [20, 30, 40], [1, 2, 3], 1)
        injectee_collection = _stub_fluxes_collection('injectee_path.ext', [30, 40, 50], [2, 3, 4], 0)
        injectable_collection0 = _stub_magnifications_collection(
            'injectable_path0.ext', [0, 10, 20], [0.5, 1, 1.5], 0)
        injectable_collection1 = _stub_magnifications_collection(
            'injectable_path1.ext', [0, 10, 20, 30], [0, 1, 1, 0], 1)
        database.training_standard_light_curve_collections = [standard_collection0, standard_collection1]
        database.training_injectee_light_curve_collection = injectee_collection
        database.training_injectable_light_curve_collections = [injectable_collection0, injectable_collection1]
        database.validation_standard_light_curve_collections = [standard_collection1]
        database.validation_injectee_light_curve_collection = injectee_collection
        database.validation_injectable_light_curve_collections = [injectable_collection1]
        # Set up simplified database settings.
        database.batch_size = 4
        database.time_steps_per_example = 3
        database.number_of_parallel_processes_per_map = 1

        def _batch_without_windowing(dataset, batch_size, window_shift):
            # Stand-in for windowing: plain batching keeps example ordering predictable.
            return dataset.batch(batch_size)

        database.window_dataset_for_zipped_example_and_label_dataset = _batch_without_windowing
        database.normalize_on_percentiles = lambda fluxes: fluxes  # Keep values unnormalized for simplicity.
        return database
Beispiel #9
0
 def database(self) -> StandardAndInjectedLightCurveDatabase:
     """A fixture providing a freshly constructed, unconfigured database."""
     blank_database = StandardAndInjectedLightCurveDatabase()
     return blank_database