Example #1
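    # Assumed context (not shown in this snippet): the method belongs to an
    # nnU-Net trainer class, and the enclosing module imports shutil plus the
    # helpers join, isdir, maybe_mkdir_p and subfiles
    # (batchgenerators.utilities.file_and_folder_operations), unpack_dataset
    # (nnunet.training.dataloading.dataset_loading), get_default_augmentation
    # (nnunet.training.data_augmentation.default_data_augmentation) and
    # SegmentationNetwork (nnunet.network_architecture.neural_network).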
    def initialize(self, training=True, force_load_plans=False):
        """
        For prediction of test cases just set training=False, this will prevent loading of training data and
        training batchgenerator initialization
        :param training:
        :return:
        """
        if force_load_plans or (self.plans is None):
            self.load_plans_file()

        self.process_plans(self.plans)

        self.setup_DA_params()

        self.folder_with_preprocessed_data = join(self.dataset_directory, self.plans['data_identifier'] +
                                                  "_stage%d" % self.stage)
        if training:
            # copy segs from prev stage to separate folder and extract them

            # If we don't do this then we need to make sure to manually delete the folder if we want to update
            # segs_from_prev_stage. I will probably forget to do so, so I leave this in as a safeguard
            if isdir(self.folder_with_segs_from_prev_stage_for_train):
                shutil.rmtree(self.folder_with_segs_from_prev_stage_for_train)

            maybe_mkdir_p(self.folder_with_segs_from_prev_stage_for_train)
            segs_from_prev_stage_files = subfiles(self.folder_with_segs_from_prev_stage, suffix='.npz')
            for s in segs_from_prev_stage_files:
                shutil.copy(s, self.folder_with_segs_from_prev_stage_for_train)

            # unpack the compressed .npz files to .npy once up front; reading
            # compressed data on every iteration makes training very slow
            if self.unpack_data:
                unpack_dataset(self.folder_with_segs_from_prev_stage_for_train)

            self.folder_with_segs_from_prev_stage = self.folder_with_segs_from_prev_stage_for_train

            if self.folder_with_preprocessed_data is not None:
                self.dl_tr, self.dl_val = self.get_basic_generators()

                if self.unpack_data:
                    print("unpacking dataset")
                    unpack_dataset(self.folder_with_preprocessed_data)
                    print("done")
                else:
                    print(
                        "INFO: Not unpacking data! Training may be slow due to that. Pray you are not using 2d or you "
                        "will wait all winter for your model to finish!")

                # wrap the basic loaders in the default augmentation pipeline
                self.tr_gen, self.val_gen = get_default_augmentation(self.dl_tr, self.dl_val,
                                                                     self.data_aug_params['patch_size_for_spatialtransform'],
                                                                     self.data_aug_params)
                self.print_to_log_file("TRAINING KEYS:\n %s" % (str(self.dataset_tr.keys())))
                self.print_to_log_file("VALIDATION KEYS:\n %s" % (str(self.dataset_val.keys())))
        self.initialize_network_optimizer_and_scheduler()
        assert isinstance(self.network, SegmentationNetwork)
        self.was_initialized = True
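
A minimal usage sketch for the example above. The trainer class name and its constructor arguments are hypothetical placeholders, not taken from the source; only the initialize() calls reflect the API shown:

# Hedged sketch: CascadeTrainer and its constructor arguments are placeholder
# names, not part of the example above.
trainer = CascadeTrainer(plans_file, fold, output_folder=output_folder,
                         dataset_directory=dataset_directory)

# Full training setup: data loaders, dataset unpacking, augmentation,
# network, optimizer and scheduler.
trainer.initialize(training=True)

# ...or, for prediction of test cases, skip the training machinery:
trainer.initialize(training=False)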
Example #2
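    # Assumed context (not shown): the same nnU-Net helpers as in Example #1
    # (maybe_mkdir_p, join, unpack_dataset, get_default_augmentation), plus an
    # AutoAugment class that is specific to this repository.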
    def initialize(self, training=True, force_load_plans=False):
        """
        For prediction of test cases just set training=False, this will prevent loading of training data and
        training batchgenerator initialization
        :param training:
        :return:
        """

        maybe_mkdir_p(self.output_folder)

        if force_load_plans or (self.plans is None):
            self.load_plans_file()

        self.process_plans(self.plans)

        self.setup_DA_params()
        # AutoAugment is presumably defined or imported elsewhere in this
        # module; it receives the same patch size and DA params as the
        # default augmentation below
        self.AutoAugment = AutoAugment(
            self.data_aug_params['patch_size_for_spatialtransform'],
            self.data_aug_params)
        self.folder_with_preprocessed_data = join(
            self.dataset_directory,
            self.plans['data_identifier'] + "_stage%d" % self.stage)
        if training:
            self.dl_tr, self.dl_val = self.get_basic_generators()
            if self.unpack_data:
                self.print_to_log_file("unpacking dataset")
                unpack_dataset(self.folder_with_preprocessed_data)
                self.print_to_log_file("done")
            else:
                self.print_to_log_file(
                    "INFO: Not unpacking data! Training may be slow due to that. Pray you are not using 2d or you "
                    "will wait all winter for your model to finish!")
            # wrap the basic loaders in the default augmentation pipeline
            self.tr_gen, self.val_gen = get_default_augmentation(
                self.dl_tr, self.dl_val,
                self.data_aug_params['patch_size_for_spatialtransform'],
                self.data_aug_params)
            self.print_to_log_file("TRAINING KEYS:\n %s" %
                                   (str(self.dataset_tr.keys())),
                                   also_print_to_console=False)
            self.print_to_log_file("VALIDATION KEYS:\n %s" %
                                   (str(self.dataset_val.keys())),
                                   also_print_to_console=False)
        self.initialize_network_optimizer_and_scheduler()
        # assert isinstance(self.network, (SegmentationNetwork, nn.DataParallel))
        self.was_initialized = True
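
As in the first example, passing training=False builds only the network, optimizer and scheduler. A hedged sketch of that inference path; MyTrainer is a placeholder name:

# Hedged sketch: MyTrainer is a placeholder. force_load_plans=True forces the
# plans file to be re-read even if self.plans is already populated.
trainer = MyTrainer(plans_file, fold, output_folder=output_folder,
                    dataset_directory=dataset_directory)
trainer.initialize(training=False, force_load_plans=True)
# trainer.network is now built and ready for checkpoint loading and prediction.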