Example #1
def test_model(data, classes):
    model = metadata.create(classes=classes,
                            sites=23,
                            domains=15,
                            learning_rate=0.001)
    prediction = model.predict(data)
    assert prediction.shape == (1, classes)
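This test relies on pytest fixtures named classes and data that are not shown in the example. A minimal, hypothetical sketch of what they could look like, assuming metadata.create builds a model that takes one-hot encoded site and domain arrays (the project's real fixtures may differ, e.g. they may include additional metadata inputs):

import numpy as np
import pytest


@pytest.fixture()
def classes():
    return 3


@pytest.fixture()
def data():
    # Hypothetical single-sample inputs: one-hot site (23) and domain (15) vectors,
    # matching sites=23 and domains=15 passed to metadata.create in the test above.
    sites = np.zeros((1, 23), dtype="float32")
    sites[0, 2] = 1
    domains = np.zeros((1, 15), dtype="float32")
    domains[0, 1] = 1
    return [sites, domains]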
Example #2
    def create(self, weights=None, submodel=None):
        """Load a model
            Args:
                weights: a saved model weights from previous run
                name: a model name from DeepTreeAttention.models
            """
        self.classes = pd.read_csv(self.classes_file).shape[0]
        if self.config["train"]["gpus"] > 1:
            self.strategy = tf.distribute.MirroredStrategy()
            print("Running in parallel on {} GPUs".format(
                self.strategy.num_replicas_in_sync))
            self.config["train"]["batch_size"] = self.config["train"][
                "batch_size"] * self.strategy.num_replicas_in_sync
            with self.strategy.scope():
                self.HSI_model, self.HSI_spatial, self.HSI_spectral = Hang.create_models(
                    self.HSI_size, self.HSI_size, self.HSI_channels,
                    self.classes, self.config["train"]["learning_rate"])
                self.RGB_model, self.RGB_spatial, self.RGB_spectral = Hang.create_models(
                    self.RGB_size, self.RGB_size, self.RGB_channels,
                    self.classes, self.config["train"]["learning_rate"])

                #create a metadata model
                self.metadata_model = metadata.create(
                    classes=self.classes,
                    sites=self.sites,
                    domains=self.domains,
                    learning_rate=self.config["train"]["learning_rate"])
        else:
            self.HSI_model, self.HSI_spatial, self.HSI_spectral = Hang.create_models(
                self.HSI_size, self.HSI_size, self.HSI_channels, self.classes,
                self.config["train"]["learning_rate"])
            self.RGB_model, self.RGB_spatial, self.RGB_spectral = Hang.create_models(
                self.RGB_size, self.RGB_size, self.RGB_channels, self.classes,
                self.config["train"]["learning_rate"])

            #create a metadata model
            self.metadata_model = metadata.create(
                classes=self.classes,
                sites=self.sites,
                domains=self.domains,
                learning_rate=self.config["train"]["learning_rate"])
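The method above reads several nested keys from self.config. A minimal sketch of the configuration it assumes, limited to the keys referenced in this example (the project's real config contains many more entries):

config = {
    "train": {
        "gpus": 1,               # a value > 1 switches to tf.distribute.MirroredStrategy
        "batch_size": 32,        # multiplied by the number of replicas when gpus > 1
        "learning_rate": 0.001,  # passed to each submodel
    }
}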
Example #3
def test_ensemble(HSI_image, metadata_data):
    batch, height, width, channels = HSI_image.shape
    sensor_inputs, sensor_outputs, spatial, spectral = Hang.define_model(
        classes=2, height=height, width=width, channels=channels)
    model1 = tf.keras.Model(inputs=sensor_inputs, outputs=sensor_outputs)

    metadata_model = metadata.create(classes=2, sites=10, domains=10, learning_rate=0.001)
    ensemble = Hang.learned_ensemble(HSI_model=model1, metadata_model=metadata_model, classes=2)
    prediction = ensemble.predict([HSI_image] + metadata_data)
    assert prediction.shape == (1, 2)
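As in the other tests, HSI_image and metadata_data are pytest fixtures defined elsewhere. A hypothetical sketch, assuming a single hyperspectral crop (the 20 x 20 x 369 shape is borrowed from Example #4) and one-hot site/domain metadata analogous to the data fixture sketched under Example #1:

import numpy as np
import pytest


@pytest.fixture()
def HSI_image():
    # One random 20 x 20 crop with 369 bands, including a batch dimension of 1.
    return np.random.random((1, 20, 20, 369)).astype("float32")


@pytest.fixture()
def metadata_data():
    # Hypothetical metadata inputs; the real fixture must match metadata.create's inputs.
    sites = np.zeros((1, 10), dtype="float32")
    sites[0, 0] = 1
    domains = np.zeros((1, 10), dtype="float32")
    domains[0, 0] = 1
    return [sites, domains]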
Example #4
def ensemble_model():
    sensor_inputs, sensor_outputs, spatial, spectral = Hang2020_geographic.define_model(
        classes=2, height=20, width=20, channels=369)
    model1 = tf.keras.Model(inputs=sensor_inputs, outputs=sensor_outputs)

    metadata_model = metadata.create(classes=2,
                                     sites=10,
                                     domains=10,
                                     learning_rate=0.001)
    ensemble_model = Hang2020_geographic.learned_ensemble(
        HSI_model=model1, metadata_model=metadata_model, classes=2)
    ensemble_model = tf.keras.Model(
        ensemble_model.inputs,
        ensemble_model.get_layer("submodel_concat").output)

    return ensemble_model
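This helper truncates the ensemble at its "submodel_concat" layer, so it returns intermediate features rather than class scores. A hedged usage sketch, reusing the HSI_image and metadata_data fixtures sketched earlier (hypothetical; if ensemble_model is registered as a pytest fixture, it would instead be requested as a test argument rather than called directly):

def test_submodel_features(HSI_image, metadata_data):
    model = ensemble_model()
    features = model.predict([HSI_image] + metadata_data)
    # One row of concatenated "submodel_concat" features, not a (1, 2) class output.
    assert features.shape[0] == 1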
Example #5
def test_define(HSI_image, metadata_data):
    batch, height, width, channels = HSI_image.shape
    sensor_inputs, sensor_outputs, spatial, spectral = Hang.define_model(
        classes=2, height=height, width=width, channels=channels)
    model1 = tf.keras.Model(inputs=sensor_inputs, outputs=sensor_outputs)

    metadata_model = metadata.create(classes=2,
                                     sites=10,
                                     domains=10,
                                     learning_rate=0.001)
    ensemble = Hang.learned_ensemble(HSI_model=model1,
                                     metadata_model=metadata_model,
                                     classes=2)

    extractor = tf.keras.Model(ensemble.inputs, ensemble.output)

    neighbor_array = []
    neighbor_distance = []
    for _ in range(5):
        prediction = extractor.predict([HSI_image] + metadata_data)
        neighbor_array.append(prediction)
        neighbor_distance.append(np.random.rand())

    #stack and batch
    neighbor_array = np.vstack(neighbor_array)
    neighbor_array = np.expand_dims(neighbor_array, axis=0)

    neighbor_distance = np.expand_dims(neighbor_distance, axis=0)

    neighbor_model = neighbors_model.create(ensemble_model=ensemble,
                                            freeze=False,
                                            k_neighbors=5,
                                            classes=2)
    prediction = neighbor_model.predict([HSI_image] + metadata_data +
                                        [neighbor_array] + [neighbor_distance])

    assert prediction.shape == (1, 2)
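For reference, the array shapes assembled above before the final predict call (d is the extractor's output width; here the extractor returns the ensemble's class output, so d = 2):

# HSI_image:          (1, height, width, channels)
# metadata_data:      a list of per-sample metadata arrays, each with a leading batch dimension of 1
# neighbor_array:     (1, 5, d)  -- five stacked extractor outputs, batched by expand_dims
# neighbor_distance:  (1, 5)     -- one distance per neighbor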
Example #6
    def create(self, weights=None, submodel=None):
        """Load a model
            Args:
                weights: a saved model weights from previous run
                name: a model name from DeepTreeAttention.models
            """
        self.classes = pd.read_csv(self.classes_file).shape[0]
        if self.config["train"]["gpus"] > 1:
            self.strategy = tf.distribute.MirroredStrategy()
            print("Running in parallel on {} GPUs".format(
                self.strategy.num_replicas_in_sync))
            self.config["train"]["batch_size"] = self.config["train"][
                "batch_size"] * self.strategy.num_replicas_in_sync
            with self.strategy.scope():
                self.HSI_model, self.HSI_spatial, self.HSI_spectral = Hang.create_models(
                    self.HSI_size, self.HSI_size, self.HSI_channels,
                    self.classes, self.config["train"]["learning_rate"])
                self.RGB_model, self.RGB_spatial, self.RGB_spectral = Hang.create_models(
                    self.RGB_size, self.RGB_size, self.RGB_channels,
                    self.classes, self.config["train"]["learning_rate"])

                #create a metadata model
                self.metadata_model = metadata.create(
                    classes=self.classes,
                    sites=self.sites,
                    domains=self.domains,
                    learning_rate=self.config["train"]["learning_rate"])
                self.ensemble_model = Hang.learned_ensemble(
                    HSI_model=self.HSI_model,
                    metadata_model=self.metadata_model,
                    freeze=self.config["ensemble"]["freeze"],
                    classes=self.classes)
                self.ensemble_model.compile(
                    loss="categorical_crossentropy",
                    optimizer=tf.keras.optimizers.Adam(
                        learning_rate=float(self.config["train"]["learning_rate"])),
                    metrics=[tf.keras.metrics.CategoricalAccuracy(name='acc')])

        else:
            self.HSI_model, self.HSI_spatial, self.HSI_spectral = Hang.create_models(
                self.HSI_size, self.HSI_size, self.HSI_channels, self.classes,
                self.config["train"]["learning_rate"])
            self.RGB_model, self.RGB_spatial, self.RGB_spectral = Hang.create_models(
                self.RGB_size, self.RGB_size, self.RGB_channels, self.classes,
                self.config["train"]["learning_rate"])

            #create a metadata model
            self.metadata_model = metadata.create(
                classes=self.classes,
                sites=self.sites,
                domains=self.domains,
                learning_rate=self.config["train"]["learning_rate"])

            #create an ensemble model
            self.ensemble_model = Hang.learned_ensemble(
                HSI_model=self.HSI_model,
                metadata_model=self.metadata_model,
                freeze=self.config["train"]["ensemble"]["freeze"],
                classes=self.classes)

            #Compile ensemble
            self.ensemble_model.compile(
                loss="categorical_crossentropy",
                optimizer=tf.keras.optimizers.Adam(
                    learning_rate=float(self.config["train"]["learning_rate"])),
                metrics=[tf.keras.metrics.CategoricalAccuracy(name='acc')])
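Compared with Example #2, this version also builds and compiles the learned ensemble. Note that the two branches read the freeze flag from different config paths: config["ensemble"]["freeze"] in the multi-GPU branch and config["train"]["ensemble"]["freeze"] in the single-GPU branch. A hypothetical config sketch that satisfies both paths as written, limited to the keys this example touches:

config = {
    "train": {
        "gpus": 1,
        "batch_size": 32,
        "learning_rate": 0.001,
        "ensemble": {"freeze": False},  # read by the single-GPU branch
    },
    "ensemble": {"freeze": False},      # read by the multi-GPU branch
}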