Example #1
    def __init__(self, inferrence_json_path, generator_obj):
        self.inferrence_json_path = inferrence_json_path
        self.generator_obj = generator_obj

        local_json_loader = JsonLoader(inferrence_json_path)
        local_json_loader.load_json()
        self.json_data = local_json_loader.json_data

        self.output_file = self.json_data["output_file"]

        if "save_raw" in self.json_data.keys():
            self.save_raw = self.json_data["save_raw"]
        else:
            self.save_raw = False

        if "rescale" in self.json_data.keys():
            self.rescale = self.json_data["rescale"]
        else:
            self.rescale = True

        self.batch_size = self.generator_obj.batch_size
        self.nb_datasets = len(self.generator_obj)
        self.indiv_shape = self.generator_obj.get_output_size()

        self.__load_model()
Example #2
    def __init__(self, inferrence_json_path, generator_obj):
        self.inferrence_json_path = inferrence_json_path
        self.generator_obj = generator_obj

        local_json_loader = JsonLoader(inferrence_json_path)
        local_json_loader.load_json()
        self.json_data = local_json_loader.json_data

        self.output_file = self.json_data["output_file"]
        self.model_path = self.json_data["model_path"]

        if "save_raw" in self.json_data.keys():
            self.save_raw = self.json_data["save_raw"]
        else:
            self.save_raw = False

        if "rescale" in self.json_data.keys():
            self.rescale = self.json_data["rescale"]
        else:
            self.rescale = True

        self.batch_size = self.generator_obj.batch_size
        self.nb_datasets = len(self.generator_obj)
        self.indiv_shape = self.generator_obj.get_output_size()

        self.model = load_model(
            self.model_path,
            custom_objects={"annealed_loss": lc.loss_selector("annealed_loss")},
        )
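Both constructors above pull their settings from a JSON file via JsonLoader. A minimal config that would satisfy them might look like the sketch below; the file name and values are hypothetical, and only keys the code actually reads are included ("save_raw" falls back to False and "rescale" to True when absent, while "model_path" is only required by the second variant):

import json

# Hypothetical inference config for the constructors above; values are
# illustrative only.
inference_config = {
    "output_file": "/tmp/denoised_movie.h5",
    "model_path": "/tmp/trained_model.h5",
    "save_raw": False,
    "rescale": True,
}

with open("inference_params.json", "w") as fp:
    json.dump(inference_config, fp)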
Example #3
def unet_1024_search(path_json):
    local_json_loader = JsonLoader(path_json)
    local_json_loader.load_json()
    json_data = local_json_loader.json_data

    def local_network_function(input_img):

        # encoder
        local_input = input_img
        for local_depth in range(json_data["network_depth"]):
            local_conv = Conv2D(
                2 ** local_depth * json_data["nb_features_scale"],
                (3, 3),
                activation="relu",
                padding="same",
            )(local_input)
            local_output = MaxPooling2D(pool_size=(2, 2))(local_conv)
            if json_data["unet"]:
                if local_depth == 0:
                    u_net_shortcut = []
                u_net_shortcut.append(local_conv)
            local_input = local_output

        # Deep CONV
        deep_conv = Conv2D(
            2 ** json_data["network_depth"] * json_data["nb_features_scale"],
            (3, 3),
            activation="relu",
            padding="same",
        )(local_input)

        # decoder
        local_input = deep_conv
        for local_depth in range(json_data["network_depth"] - 1, -1, -1):
            local_up = UpSampling2D((2, 2))(local_input)
            if json_data["unet"]:
                local_conc = Concatenate()(
                    [local_up, u_net_shortcut[local_depth]])
            else:
                local_conc = local_up

            local_output = Conv2D(
                2 ** local_depth * json_data["nb_features_scale"],
                (3, 3),
                activation="relu",
                padding="same",
            )(local_conc)
            local_input = local_output

        # output layer
        final = Conv2D(1, (1, 1), activation=None,
                       padding="same")(local_output)

        return final

    return local_network_function
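The factory above returns a closure that builds the network once it is handed an input tensor. A sketch of wiring it into a Keras model follows; the config file name, the key values, and the tensorflow.keras imports are assumptions, and the input height and width must be divisible by 2 ** network_depth so the MaxPooling2D/UpSampling2D pairs (and the optional skip concatenations) line up:

import json
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model

# Hypothetical config; the keys match what local_network_function reads.
with open("unet_params.json", "w") as fp:
    json.dump({"network_depth": 4, "nb_features_scale": 8, "unet": True}, fp)

network_fn = unet_1024_search("unet_params.json")
input_img = Input(shape=(1024, 1024, 1))  # 1024 is divisible by 2 ** 4
model = Model(input_img, network_fn(input_img))
model.summary()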
Example #4
    def __init__(self, inferrence_json_path, generator_obj):
        self.inferrence_json_path = inferrence_json_path
        self.generator_obj = generator_obj

        local_json_loader = JsonLoader(inferrence_json_path)
        local_json_loader.load_json()
        self.json_data = local_json_loader.json_data
        self.output_file = self.json_data["output_file"]
        self.model_path = self.json_data["model_path"]

        # This is used when the output is a full volume, to select only
        # the center voxel. Currently this is always set to True; a
        # future implementation could scan the volume more cleverly and
        # leverage more than just the center pixel.
        if "single_voxel_output_single" in self.json_data.keys():
            self.single_voxel_output_single = self.json_data[
                "single_voxel_output_single"]
        else:
            self.single_voxel_output_single = True

        self.model = load_model(self.model_path)
        self.input_data_size = self.generator_obj.data_shape
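As an aside, the "if key in json_data.keys()" / else pattern that recurs throughout these examples can be collapsed with dict.get; a behavior-equivalent sketch:

# dict.get returns the default (second argument) when the key is
# missing, matching the if/else fallback blocks above.
json_data = {"output_file": "out.h5"}  # "single_voxel_output_single" absent
single_voxel_output_single = json_data.get("single_voxel_output_single", True)
print(single_voxel_output_single)  # True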
Example #5
    def __init__(
        self,
        generator_obj,
        test_generator_obj,
        network_obj,
        trainer_json_path,
        auto_compile=True,
    ):

        self.network_obj = network_obj
        self.local_generator = generator_obj
        self.local_test_generator = test_generator_obj

        json_obj = JsonLoader(trainer_json_path)

        # The following line keeps backward compatibility in case
        # new parameter logic is added.
        json_obj.set_default("apply_learning_decay", 0)

        json_data = json_obj.json_data
        self.output_dir = json_data["output_dir"]
        self.run_uid = json_data["run_uid"]
        self.model_string = json_data["model_string"]
        self.batch_size = json_data["batch_size"]
        self.steps_per_epoch = json_data["steps_per_epoch"]
        self.loss_type = json_data["loss"]
        self.nb_gpus = json_data["nb_gpus"]
        self.period_save = json_data["period_save"]
        self.learning_rate = json_data["learning_rate"]

        if "checkpoints_dir" in json_data.keys():
            self.checkpoints_dir = json_data["checkpoints_dir"]
        else:
            self.checkpoints_dir = self.output_dir

        if "use_multiprocessing" in json_data.keys():
            self.use_multiprocessing = json_data["use_multiprocessing"]
        else:
            self.use_multiprocessing = True

        if "caching_validation" in json_data.keys():
            self.caching_validation = json_data["caching_validation"]
        else:
            self.caching_validation = True

        self.output_model_file_path = os.path.join(
            self.output_dir,
            self.run_uid + "_" + self.model_string + "_model.h5"
        )

        if "nb_workers" in json_data.keys():
            self.workers = json_data["nb_workers"]
        else:
            self.workers = 16

        # These parameters are related to setting up the
        # behavior of learning rates
        self.apply_learning_decay = json_data["apply_learning_decay"]

        if self.apply_learning_decay == 1:
            self.initial_learning_rate = json_data["initial_learning_rate"]
            self.epochs_drop = json_data["epochs_drop"]

        self.nb_times_through_data = json_data["nb_times_through_data"]

        # The generator has to be initialized first to provide
        # the input size of the network.
        self.initialize_generator()

        if self.nb_gpus > 1:
            mirrored_strategy = tensorflow.distribute.MirroredStrategy()
            with mirrored_strategy.scope():
                if auto_compile:
                    self.initialize_network()

                self.initialize_callbacks()

                self.initialize_loss()

                self.initialize_optimizer()
                if auto_compile:
                    self.compile()
        else:
            if auto_compile:
                self.initialize_network()

            self.initialize_callbacks()

            self.initialize_loss()

            self.initialize_optimizer()

            if auto_compile:
                self.compile()
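The trainer constructor above reads a larger set of keys. A hypothetical config covering them is sketched below; values are illustrative only. "checkpoints_dir", "use_multiprocessing", "caching_validation" and "nb_workers" are optional (defaulting to output_dir, True, True and 16), "apply_learning_decay" is defaulted to 0 by set_default, and "initial_learning_rate" plus "epochs_drop" are only needed when decay is enabled:

import json

# Hypothetical trainer config; only required keys are shown, with
# illustrative values.
trainer_config = {
    "output_dir": "/tmp/training_output",
    "run_uid": "2021_01_01_12_00",
    "model_string": "unet_denoiser",
    "batch_size": 5,
    "steps_per_epoch": 100,
    "loss": "mean_squared_error",
    "nb_gpus": 0,
    "period_save": 25,
    "learning_rate": 0.0001,
    "nb_times_through_data": 1,
}

with open("trainer_params.json", "w") as fp:
    json.dump(trainer_config, fp)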
Example #6
    def __init__(self, json_path):
        local_json_loader = JsonLoader(json_path)
        local_json_loader.load_json()
        self.json_data = local_json_loader.json_data
        self.local_mean = 1
        self.local_std = 1
Example #7
    def __init__(
        self,
        generator_obj,
        test_generator_obj,
        network_obj,
        trainer_json_path,
        auto_compile=True,
    ):

        self.network_obj = network_obj
        self.local_generator = generator_obj
        self.local_test_generator = test_generator_obj

        json_obj = JsonLoader(trainer_json_path)

        # The following line keeps backward compatibility in case
        # new parameter logic is added.
        json_obj.set_default("apply_learning_decay", 0)

        json_data = json_obj.json_data
        self.output_dir = json_data["output_dir"]
        self.run_uid = json_data["run_uid"]
        self.model_string = json_data["model_string"]
        self.batch_size = json_data["batch_size"]
        self.steps_per_epoch = json_data["steps_per_epoch"]
        self.loss_type = json_data["loss"]
        self.nb_gpus = json_data["nb_gpus"]
        self.period_save = json_data["period_save"]
        self.learning_rate = json_data["learning_rate"]

        if "nb_workers" in json_data.keys():
            self.workers = json_data["nb_workers"]
        else:
            self.workers = 16

        # These parameters are related to setting up the
        # behavior of learning rates
        self.apply_learning_decay = json_data["apply_learning_decay"]

        if self.apply_learning_decay == 1:
            self.initial_learning_rate = json_data["initial_learning_rate"]
            self.epochs_drop = json_data["epochs_drop"]

        self.nb_times_through_data = json_data["nb_times_through_data"]

        # The generator would normally be initialized first to provide
        # the input size of the network; disabled here
        # (modified by sk 2020/11/20):
        # self.initialize_generator()

        # "nb_max_epoch" caps the total number of training epochs;
        # it should not exceed the value computed by the "else" branch below.
        if "nb_max_epoch" in json_data.keys():
            self.epochs = json_data["nb_max_epoch"]
        else:
            self.epochs = self.nb_times_through_data * int(
                np.floor(len(self.local_generator) / self.steps_per_epoch)
            )

        if self.nb_gpus > 1:
            mirrored_strategy = tensorflow.distribute.MirroredStrategy()
            with mirrored_strategy.scope():
                self.initialize_network()

                self.initialize_callbacks()

                self.initialize_loss()

                self.initialize_optimizer()
                if auto_compile:
                    self.compile()
        else:
            if auto_compile:
                self.initialize_network()

            self.initialize_callbacks()

            self.initialize_loss()

            self.initialize_optimizer()

            if auto_compile:
                self.compile()
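All of the examples above lean on the same JsonLoader helper, whose definition is not shown on this page. A minimal stand-in consistent with how it is used (load_json, a json_data attribute, and set_default for backward-compatible defaults) could look like the following; this is an assumption for illustration, not the library's actual implementation:

import json

class JsonLoader:
    """Minimal stand-in for the JsonLoader used above (assumed API)."""

    def __init__(self, path):
        self.path = path
        self.json_data = None
        self.load_json()  # Examples #5/#7 access json_data without an
                          # explicit load_json() call, so load eagerly.

    def load_json(self):
        # Read the JSON file into the json_data attribute; calling it
        # again (as the other examples do) simply re-reads the file.
        with open(self.path, "r") as fp:
            self.json_data = json.load(fp)

    def set_default(self, key, value):
        # Backward-compatible defaulting used by the trainer examples:
        # only fill the key when the config file does not provide it.
        if key not in self.json_data:
            self.json_data[key] = value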