Example #1
    def testZeroHiddenLayers(self):
        # Build config.
        feature_spec = {
            "time_feature_1": {
                "length": 10,
                "is_time_series": True,
            },
            "time_feature_2": {
                "length": 10,
                "is_time_series": True,
            },
            "aux_feature_1": {
                "length": 1,
                "is_time_series": False,
            },
        }
        config = configurations.base()
        config["inputs"]["features"] = feature_spec
        config = configdict.ConfigDict(config)
        config.hparams.output_dim = 1
        config.hparams.num_pre_logits_hidden_layers = 0

        # Build model.
        features = input_ops.build_feature_placeholders(config.inputs.features)
        labels = input_ops.build_labels_placeholder()
        model = astro_model.AstroModel(features, labels, config.hparams,
                                       tf.estimator.ModeKeys.TRAIN)
        model.build()

        # Validate Tensor shapes.
        self.assertShapeEquals((None, 21), model.pre_logits_concat)
        logits_w = testing.get_variable_by_name("logits/kernel")
        self.assertShapeEquals((21, 1), logits_w)
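
The asserted width of 21 follows directly from the feature spec: with num_pre_logits_hidden_layers set to 0, the logits layer consumes the raw concatenation of all feature columns. A minimal standalone sketch of that arithmetic (not part of the test):

# 10 (time_feature_1) + 10 (time_feature_2) + 1 (aux_feature_1) = 21.
feature_lengths = {"time_feature_1": 10, "time_feature_2": 10, "aux_feature_1": 1}
expected_width = sum(feature_lengths.values())
assert expected_width == 21
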
Example #2
    def testOneTimeSeriesFeature(self):
        # Build config.
        feature_spec = {
            "time_feature_1": {
                "length": 10,
                "is_time_series": True,
            }
        }
        config = configurations.base()
        config["inputs"]["features"] = feature_spec
        config = configdict.ConfigDict(config)

        # Build model.
        features = input_ops.build_feature_placeholders(config.inputs.features)
        labels = input_ops.build_labels_placeholder()
        model = astro_model.AstroModel(features, labels, config.hparams,
                                       tf.estimator.ModeKeys.TRAIN)
        model.build()

        # Validate hidden layers.
        self.assertItemsEqual(["time_feature_1"],
                              model.time_series_hidden_layers.keys())
        self.assertIs(model.time_series_features["time_feature_1"],
                      model.time_series_hidden_layers["time_feature_1"])
        self.assertEqual(len(model.aux_hidden_layers), 0)
        self.assertIs(model.time_series_features["time_feature_1"],
                      model.pre_logits_concat)
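
The last check uses assertIs, i.e. object identity: with a single time-series feature and no aux features there is nothing to concatenate, so pre_logits_concat is expected to be the very same tensor object as the input feature, not merely an equal-shaped one. The distinction in plain Python:

# assertEqual-style equality vs. assertIs-style identity.
a = [1, 2, 3]
b = list(a)         # equal contents, but a different object
assert a == b       # equality holds
assert a is not b   # identity does not
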
Example #3
    def testInvalidModeRaisesError(self):
        # Build config.
        config = configdict.ConfigDict(configurations.base())

        # Build model.
        features = input_ops.build_feature_placeholders(config.inputs.features)
        labels = input_ops.build_labels_placeholder()
        with self.assertRaises(ValueError):
            _ = astro_model.AstroModel(features, labels, config.hparams,
                                       "training")
Example #4
    def testZeroFeaturesRaisesError(self):
        # Build config.
        config = configurations.base()
        config["inputs"]["features"] = {}
        config = configdict.ConfigDict(config)

        # Build model.
        features = input_ops.build_feature_placeholders(config.inputs.features)
        labels = input_ops.build_labels_placeholder()
        model = astro_model.AstroModel(features, labels, config.hparams,
                                       tf.estimator.ModeKeys.TRAIN)
        with self.assertRaises(ValueError):
            # Raises ValueError because at least one feature is required.
            model.build()
Example #5
    def testEvalMode(self):
        # Build config.
        feature_spec = {
            "time_feature_1": {
                "length": 10,
                "is_time_series": True,
            },
            "time_feature_2": {
                "length": 10,
                "is_time_series": True,
            },
            "aux_feature_1": {
                "length": 1,
                "is_time_series": False,
            },
        }
        config = configurations.base()
        config["inputs"]["features"] = feature_spec
        config = configdict.ConfigDict(config)
        config.hparams.output_dim = 1

        # Build model.
        features = input_ops.build_feature_placeholders(config.inputs.features)
        labels = input_ops.build_labels_placeholder()
        model = astro_model.AstroModel(features, labels, config.hparams,
                                       tf.estimator.ModeKeys.EVAL)
        model.build()

        # Validate Tensor shapes.
        self.assertShapeEquals((None, 21), model.pre_logits_concat)
        self.assertShapeEquals((None, 1), model.logits)
        self.assertShapeEquals((None, 1), model.predictions)
        self.assertShapeEquals((None, ), model.batch_losses)
        self.assertShapeEquals((), model.total_loss)

        # Execute the TensorFlow graph.
        scaffold = tf.train.Scaffold()
        scaffold.finalize()
        with self.test_session() as sess:
            sess.run([scaffold.init_op, scaffold.local_init_op])
            step = sess.run(model.global_step)
            self.assertEqual(0, step)

            # Fetch total loss.
            features = testing.fake_features(feature_spec, batch_size=16)
            labels = testing.fake_labels(config.hparams.output_dim,
                                         batch_size=16)
            feed_dict = input_ops.prepare_feed_dict(model, features, labels)
            total_loss = sess.run(model.total_loss, feed_dict=feed_dict)
            self.assertShapeEquals((), total_loss)
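
For reference, a sketch of what the fake batch plausibly looks like for this spec; the shapes are an assumption based on the feature lengths and batch_size=16, and the real arrays come from testing.fake_features / testing.fake_labels:

import numpy as np

# Assumed shapes only: one row per example, one column per feature value.
fake_feature_arrays = {
    "time_feature_1": np.random.random((16, 10)),
    "time_feature_2": np.random.random((16, 10)),
    "aux_feature_1": np.random.random((16, 1)),
}
fake_label_array = np.random.randint(0, 2, size=(16,))  # output_dim = 1 -> one binary label per example
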
Example #6
def local_global():
    """Base configuration for a CNN model with separate local/global views."""
    config = parent_configs.base()

    # Override the model features to be local_view and global_view time series.
    config["inputs"]["features"] = {
        "local_view": {
            "length": 201,
            "is_time_series": True,
        },
        "global_view": {
            "length": 2001,
            "is_time_series": True,
        },
    }

    # Add configurations for the convolutional layers of time series features.
    config["hparams"]["time_series_hidden"] = {
        "local_view": {
            "cnn_num_blocks": 2,
            "cnn_block_size": 2,
            "cnn_initial_num_filters": 16,
            "cnn_block_filter_factor": 2,
            "cnn_kernel_size": 5,
            "convolution_padding": "same",
            "pool_size": 7,
            "pool_strides": 2,
        },
        "global_view": {
            "cnn_num_blocks": 5,
            "cnn_block_size": 2,
            "cnn_initial_num_filters": 16,
            "cnn_block_filter_factor": 2,
            "cnn_kernel_size": 5,
            "convolution_padding": "same",
            "pool_size": 5,
            "pool_strides": 2,
        },
    }
    config["hparams"]["num_pre_logits_hidden_layers"] = 4
    config["hparams"]["pre_logits_hidden_layer_size"] = 512
    return config
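
A minimal sketch of how these block hyperparameters are typically expanded into per-block filter counts, assuming block i uses cnn_initial_num_filters * cnn_block_filter_factor**i (the helper name is illustrative, not part of the library); the same scheme applies to the single global_view config in the next example:

def filters_per_block(initial_num_filters, block_filter_factor, num_blocks):
    """Geometric filter growth: block i gets initial * factor**i filters."""
    return [initial_num_filters * block_filter_factor**i for i in range(num_blocks)]

filters_per_block(16, 2, 2)  # local_view  -> [16, 32]
filters_per_block(16, 2, 5)  # global_view -> [16, 32, 64, 128, 256]
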
Example #7
def base():
    """Base configuration for a CNN model with a single global view."""
    config = parent_configs.base()

    # Add configuration for the convolutional layers of the global_view feature.
    config["hparams"]["time_series_hidden"] = {
        "global_view": {
            "cnn_num_blocks": 5,
            "cnn_block_size": 2,
            "cnn_initial_num_filters": 16,
            "cnn_block_filter_factor": 2,
            "cnn_kernel_size": 5,
            "convolution_padding": "same",
            "pool_size": 5,
            "pool_strides": 2,
        },
    }
    config["hparams"]["num_pre_logits_hidden_layers"] = 4
    config["hparams"]["pre_logits_hidden_layer_size"] = 1024
    return config
Example #8
def base():
    """Base config for a fully connected model with a single global view."""
    config = parent_configs.base()

    # Add configuration for the fully-connected layers of the global_view feature.
    config["hparams"]["time_series_hidden"] = {
        "global_view": {
            "num_local_layers": 0,
            "local_layer_size": 128,

            # If > 0, the first layer is implemented as a wide convolutional layer
            # for invariance to small translations.
            "translation_delta": 0,

            # Pooling type following the wide convolutional layer.
            "pooling_type": "max",

            # Dropout rate for the fully connected layers.
            "dropout_rate": 0.0,
        },
    }
    return config
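
A sketch of one way the translation_delta / pooling_type pair could be realized (an assumption for illustration, not the library's implementation): a 1-D convolution whose kernel covers all but 2 * translation_delta input positions yields 2 * translation_delta + 1 outputs, and pooling over them makes the result insensitive to shifts of up to +/- translation_delta.

import tensorflow as tf

def wide_first_layer(x, num_units, translation_delta, pooling_type):
    """Hypothetical wide conv + pooling; x has shape [batch, length, 1]."""
    length = int(x.shape[-2])
    kernel_size = length - 2 * translation_delta  # leaves 2*delta + 1 output positions
    conv = tf.keras.layers.Conv1D(num_units, kernel_size, padding="valid")(x)
    if pooling_type == "max":
        return tf.keras.layers.GlobalMaxPooling1D()(conv)
    return tf.keras.layers.GlobalAveragePooling1D()(conv)

# e.g. a batch of 4 global views of length 2001, tolerating shifts of +/- 5 bins:
y = wide_first_layer(tf.random.normal([4, 2001, 1]), 128, 5, "max")  # -> shape [4, 128]
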
Example #9
def local_global():
    """Base config for a locally fully connected model with local/global views."""
    config = parent_configs.base()

    # Override the model features to be local_view and global_view time series.
    config["inputs"]["features"] = {
        "local_view": {
            "length": 201,
            "is_time_series": True,
        },
        "global_view": {
            "length": 2001,
            "name_in_proto": "light_curve",
            "is_time_series": True,
            "data_source": "",
        },
    }

    # Add configurations for the fully-connected layers of time series features.
    config["hparams"]["time_series_hidden"] = {
        "local_view": {
            "num_local_layers": 0,
            "local_layer_size": 128,
            "translation_delta": 0,  # For wide convolution.
            "pooling_type": "max",  # For wide convolution.
            "dropout_rate": 0.0,
        },
        "global_view": {
            "num_local_layers": 0,
            "local_layer_size": 128,
            "translation_delta": 0,  # For wide convolution.
            "pooling_type": "max",  # For wide convolution.
            "dropout_rate": 0.0,
        },
    }
    return config
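
Usage sketch, mirroring the tests above: the returned dict is wrapped in a ConfigDict before a model is built from it. AstroModel is used here purely for illustration; in practice this config would be paired with its matching fully connected model class.

config = configdict.ConfigDict(local_global())
features = input_ops.build_feature_placeholders(config.inputs.features)
labels = input_ops.build_labels_placeholder()
model = astro_model.AstroModel(features, labels, config.hparams,
                               tf.estimator.ModeKeys.TRAIN)
model.build()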