Example #1
0
 def create(self, weights=None, submodel=None):
     """Build the HSI and RGB submodels for this object.

     Args:
         weights: a saved model weights file from a previous run (unused here)
         submodel: a model name from DeepTreeAttention.models (unused here)
     """
     lr = self.config["train"]["learning_rate"]
     # One Hang model per sensor; each call also returns its spatial and
     # spectral subnetworks.
     self.HSI_model, self.HSI_spatial, self.HSI_spectral = Hang.create_models(
         self.HSI_size, self.HSI_size, self.HSI_channels, self.classes, lr)
     self.RGB_model, self.RGB_spatial, self.RGB_spectral = Hang.create_models(
         self.RGB_size, self.RGB_size, self.RGB_channels, self.classes, lr)
def test_ensemble(HSI_image, metadata_data):
    """An HSI + metadata learned ensemble predicts one (1, 2) softmax row."""
    n_batch, img_h, img_w, img_c = HSI_image.shape
    inputs, outputs, spatial, spectral = Hang.define_model(
        classes=2, height=img_h, width=img_w, channels=img_c)
    hsi_model = tf.keras.Model(inputs=inputs, outputs=outputs)

    meta_model = metadata.create(
        classes=2, sites=10, domains=10, learning_rate=0.001)
    ensemble = Hang.learned_ensemble(
        HSI_model=hsi_model, metadata_model=meta_model, classes=2)

    prediction = ensemble.predict([HSI_image] + metadata_data)
    assert prediction.shape == (1, 2)
    
    
    
Example #3
0
def test_ensemble(RGB_image, HSI_image):
    """A two-sensor (HSI + RGB) ensemble predicts one (1, 2) row."""
    def build_sensor_model(image):
        # One Hang model sized to this sensor's image dimensions.
        _, h, w, c = image.shape
        inputs, outputs, spatial, spectral = Hang.define_model(
            classes=2, height=h, width=w, channels=c)
        return tf.keras.Model(inputs=inputs, outputs=outputs)

    hsi_model = build_sensor_model(HSI_image[0])
    rgb_model = build_sensor_model(RGB_image[0])

    ensemble = Hang.ensemble(models=[hsi_model, rgb_model], classes=2)
    prediction = ensemble.predict([HSI_image[0], RGB_image[0]])
    assert prediction.shape == (1, 2)
Example #4
0
def test_model(RGB_image, classes):
    """A single-sensor Hang model predicts one row of `classes` scores."""
    _, img_height, img_width, img_channels = RGB_image[0].shape
    model_inputs, model_outputs, spatial, spectral = Hang.define_model(
        classes=classes,
        height=img_height,
        width=img_width,
        channels=img_channels)
    network = tf.keras.Model(inputs=model_inputs, outputs=model_outputs)
    prediction = network.predict(RGB_image)
    assert prediction.shape == (1, classes)
Example #5
0
def ensemble_model():
    """Build a learned HSI + metadata ensemble truncated at its concat layer.

    Returns:
        A tf.keras.Model whose output is the "submodel_concat" feature layer
        rather than the final classification head.
    """
    inputs, outputs, spatial, spectral = Hang2020_geographic.define_model(
        classes=2, height=20, width=20, channels=369)
    hsi_model = tf.keras.Model(inputs=inputs, outputs=outputs)

    meta_model = metadata.create(
        classes=2, sites=10, domains=10, learning_rate=0.001)

    full_ensemble = Hang2020_geographic.learned_ensemble(
        HSI_model=hsi_model, metadata_model=meta_model, classes=2)

    # Expose the intermediate feature layer instead of the softmax output.
    concat_output = full_ensemble.get_layer("submodel_concat").output
    return tf.keras.Model(full_ensemble.inputs, concat_output)
Example #6
0
    def create(self, weights=None, submodel=None):
        """Build the HSI, RGB, and metadata models for this object.

        Args:
            weights: a saved model weights file from a previous run
                (currently unused in this method)
            submodel: a model name from DeepTreeAttention.models
                (currently unused in this method)
        """
        self.classes = pd.read_csv(self.classes_file).shape[0]

        def build_models():
            # Shared construction path; previously this code was duplicated
            # verbatim in the multi-GPU and single-GPU branches.
            lr = self.config["train"]["learning_rate"]
            self.HSI_model, self.HSI_spatial, self.HSI_spectral = Hang.create_models(
                self.HSI_size, self.HSI_size, self.HSI_channels,
                self.classes, lr)
            self.RGB_model, self.RGB_spatial, self.RGB_spectral = Hang.create_models(
                self.RGB_size, self.RGB_size, self.RGB_channels,
                self.classes, lr)

            #create a metadata model
            self.metadata_model = metadata.create(
                classes=self.classes,
                sites=self.sites,
                domains=self.domains,
                learning_rate=lr)

        if self.config["train"]["gpus"] > 1:
            self.strategy = tf.distribute.MirroredStrategy()
            print("Running in parallel on {} GPUs".format(
                self.strategy.num_replicas_in_sync))
            # Scale the global batch size with the number of replicas.
            self.config["train"]["batch_size"] = self.config["train"][
                "batch_size"] * self.strategy.num_replicas_in_sync
            # Variables must be created under the distribution scope.
            with self.strategy.scope():
                build_models()
        else:
            build_models()
Example #7
0
def test_define(HSI_image, metadata_data):
    """A neighbors model over ensemble features predicts one (1, 2) row."""
    _, img_h, img_w, img_c = HSI_image.shape
    inputs, outputs, spatial, spectral = Hang.define_model(
        classes=2, height=img_h, width=img_w, channels=img_c)
    hsi_model = tf.keras.Model(inputs=inputs, outputs=outputs)

    meta_model = metadata.create(
        classes=2, sites=10, domains=10, learning_rate=0.001)
    ensemble = Hang.learned_ensemble(
        HSI_model=hsi_model, metadata_model=meta_model, classes=2)

    extractor = tf.keras.Model(ensemble.inputs, ensemble.output)

    # Build 5 neighbor feature vectors with random distances.
    neighbor_array = []
    neighbor_distance = []
    for _ in np.arange(5):
        neighbor_array.append(extractor.predict([HSI_image] + metadata_data))
        neighbor_distance.append(np.random.rand())

    #stack and batch
    neighbor_array = np.expand_dims(np.vstack(neighbor_array), axis=0)
    neighbor_distance = np.expand_dims(neighbor_distance, axis=0)

    neighbor_model = neighbors_model.create(
        ensemble_model=ensemble, freeze=False, k_neighbors=5, classes=2)
    prediction = neighbor_model.predict(
        [HSI_image] + metadata_data + [neighbor_array] + [neighbor_distance])

    assert prediction.shape == (1, 2)
Example #8
0
 def ensemble(self, experiment, class_weight=None, freeze = True, train=True):
     """Build and optionally train an HSI + RGB ensemble model.

     Args:
         experiment: a comet experiment used by the callbacks (may be None)
         class_weight: optional class weights forwarded to fit
         freeze: whether to freeze the submodel weights in the ensemble
         train: whether to compile and fit the ensemble after building it
     """
     # Manually override batch size
     self.config["train"]["batch_size"] = self.config["train"]["ensemble"]["batch_size"]
     self.read_data(mode="ensemble")
     self.ensemble_model = Hang.ensemble([self.HSI_model, self.RGB_model], freeze=freeze, classes=self.classes)

     if not train:
         return

     self.ensemble_model.compile(
         loss="categorical_crossentropy",
         optimizer=tf.keras.optimizers.Adam(
             lr=float(self.config["train"]["learning_rate"])),
         metrics=[tf.keras.metrics.CategoricalAccuracy(name='acc')])

     # Callbacks need both validation data and a comet experiment.
     callback_list = None
     label_names = None
     if self.val_split is None:
         print("Cannot run callbacks without validation data, skipping...")
     elif experiment is None:
         print("Cannot run callbacks without comet experiment, skipping...")
     else:
         if self.classes_file is not None:
             labeldf = pd.read_csv(self.classes_file)
             label_names = list(labeldf.taxonID.values)

         callback_list = callbacks.create(log_dir=self.log_dir,
                                          experiment=experiment,
                                          validation_data=self.val_split,
                                          train_data=self.train_split,
                                          label_names=label_names,
                                          submodel="ensemble")

     #Train ensemble layer
     self.ensemble_model.fit(
         self.train_split,
         epochs=self.config["train"]["epochs"],
         validation_data=self.val_split,
         callbacks=callback_list,
         class_weight=class_weight)
Example #9
0
    def create(self, weights=None, submodel=None):
        """Build and compile the HSI, RGB, metadata, and ensemble models.

        Args:
            weights: a saved model weights file from a previous run
                (currently unused in this method)
            submodel: a model name from DeepTreeAttention.models
                (currently unused in this method)
        """
        self.classes = pd.read_csv(self.classes_file).shape[0]

        def build_models():
            # Shared construction path; previously duplicated verbatim in the
            # multi-GPU and single-GPU branches.
            lr = self.config["train"]["learning_rate"]
            self.HSI_model, self.HSI_spatial, self.HSI_spectral = Hang.create_models(
                self.HSI_size, self.HSI_size, self.HSI_channels,
                self.classes, lr)
            self.RGB_model, self.RGB_spatial, self.RGB_spectral = Hang.create_models(
                self.RGB_size, self.RGB_size, self.RGB_channels,
                self.classes, lr)

            #create a metadata model
            self.metadata_model = metadata.create(
                classes=self.classes,
                sites=self.sites,
                domains=self.domains,
                learning_rate=lr)

            #create an ensemble model
            # BUG FIX: the multi-GPU branch previously read
            # self.config["ensemble"]["freeze"], while the single-GPU branch
            # and every other call site use config["train"]["ensemble"];
            # both paths now agree on the ["train"]["ensemble"] key.
            self.ensemble_model = Hang.learned_ensemble(
                HSI_model=self.HSI_model,
                metadata_model=self.metadata_model,
                freeze=self.config["train"]["ensemble"]["freeze"],
                classes=self.classes)

            #Compile ensemble
            self.ensemble_model.compile(
                loss="categorical_crossentropy",
                optimizer=tf.keras.optimizers.Adam(lr=float(lr)),
                metrics=[tf.keras.metrics.CategoricalAccuracy(name='acc')])

        if self.config["train"]["gpus"] > 1:
            self.strategy = tf.distribute.MirroredStrategy()
            print("Running in parallel on {} GPUs".format(
                self.strategy.num_replicas_in_sync))
            # Scale the global batch size with the number of replicas.
            self.config["train"]["batch_size"] = self.config["train"][
                "batch_size"] * self.strategy.num_replicas_in_sync
            # Variables and compilation must happen under the strategy scope.
            with self.strategy.scope():
                build_models()
        else:
            build_models()
Example #10
0
    def ensemble(self, experiment, class_weight=None, freeze=True, train=True):
        """Create and optionally train the learned HSI + metadata ensemble.

        Args:
            experiment: a comet experiment used by the callbacks (may be None)
            class_weight: optional class weights forwarded to fit
            freeze: whether to freeze submodel weights in the ensemble
            train: whether to compile and fit the ensemble after building it
        """
        self.classes = pd.read_csv(self.classes_file).shape[0]

        self.read_data(mode="ensemble")

        # Callbacks need both validation data and a comet experiment.
        callback_list = None
        label_names = None
        if self.val_split is None:
            print("Cannot run callbacks without validation data, skipping...")
        elif experiment is None:
            print("Cannot run callbacks without comet experiment, skipping...")
        else:
            if self.classes_file is not None:
                labeldf = pd.read_csv(self.classes_file)
                label_names = list(labeldf.taxonID.values)

            callback_list = callbacks.create(log_dir=self.log_dir,
                                             experiment=experiment,
                                             validation_data=self.val_split,
                                             train_data=self.train_split,
                                             label_names=label_names,
                                             train_shp=self.train_shp,
                                             submodel="ensemble")

            print("callback list is {}".format(callback_list))

        def build_and_train():
            # Shared path; previously duplicated verbatim in the multi-GPU
            # and single-GPU branches.
            self.ensemble_model = Hang.learned_ensemble(
                HSI_model=self.HSI_model,
                metadata_model=self.metadata_model,
                freeze=freeze,
                classes=self.classes)

            if train:
                self.ensemble_model.compile(
                    loss="categorical_crossentropy",
                    optimizer=tf.keras.optimizers.Adam(
                        lr=float(self.config["train"]["learning_rate"])),
                    metrics=[tf.keras.metrics.CategoricalAccuracy(name='acc')])

                #Train ensemble layer
                self.ensemble_model.fit(
                    self.train_split,
                    epochs=self.config["train"]["ensemble"]["epochs"],
                    validation_data=self.val_split,
                    callbacks=callback_list,
                    class_weight=class_weight)

        if self.config["train"]["gpus"] > 1:
            # Build, compile, and fit under the distribution scope.
            with self.strategy.scope():
                build_and_train()
        else:
            build_and_train()