Code example #1
def crop(task_string, override=False, num_threads=8):
    # cropped_output_dir and splitted_4d_output_dir are module-level path constants
    # (older nnU-Net versions define them in nnunet.paths)
    cropped_out_dir = join(cropped_output_dir, task_string)
    maybe_mkdir_p(cropped_out_dir)

    # if override is requested, wipe any previous cropping output and start fresh
    if override and isdir(cropped_out_dir):
        shutil.rmtree(cropped_out_dir)
        maybe_mkdir_p(cropped_out_dir)

    splitted_4d_output_dir_task = join(splitted_4d_output_dir, task_string)
    # build per-case lists of the image and segmentation files for this task
    lists, _ = create_lists_from_splitted_dataset(splitted_4d_output_dir_task)

    # crop every case to its nonzero bounding box using num_threads worker processes
    imgcrop = ImageCropper(num_threads, cropped_out_dir)
    imgcrop.run_cropping(lists, overwrite_existing=override)
    # keep the dataset description next to the cropped data
    shutil.copy(join(splitted_4d_output_dir, task_string, "dataset.json"), cropped_out_dir)
Code example #2
def crop(task_string, override=False, num_threads=default_num_threads):
    # same logic as example #1, but using the newer nnU-Net path constants
    # (nnUNet_cropped_data, nnUNet_raw_data) and os.makedirs instead of maybe_mkdir_p
    cropped_out_dir = join(nnUNet_cropped_data, task_string)
    os.makedirs(cropped_out_dir, exist_ok=True)

    # remove stale output when override is requested
    if override and isdir(cropped_out_dir):
        shutil.rmtree(cropped_out_dir)
        os.makedirs(cropped_out_dir, exist_ok=True)

    splitted_4d_output_dir_task = join(nnUNet_raw_data, task_string)
    lists, _ = create_lists_from_splitted_dataset(splitted_4d_output_dir_task)

    # crop all cases in parallel and copy dataset.json alongside the cropped data
    imgcrop = ImageCropper(num_threads, cropped_out_dir)
    imgcrop.run_cropping(lists, overwrite_existing=override)
    shutil.copy(join(nnUNet_raw_data, task_string, "dataset.json"), cropped_out_dir)
Code example #3
def crop(task_string, override=False, num_threads=default_num_threads):
    cropped_out_dir = join(nnUNet_cropped_data, task_string)
    maybe_mkdir_p(cropped_out_dir)

    if override and isdir(cropped_out_dir):
        shutil.rmtree(cropped_out_dir)
        maybe_mkdir_p(cropped_out_dir)

    splitted_4d_output_dir_task = join(nnUNet_raw_data, task_string)
    lists, _ = create_lists_from_splitted_dataset(splitted_4d_output_dir_task)

    # from nnunet.preprocessing.cropping import ImageCropper
    imgcrop = ImageCropper(num_threads, cropped_out_dir)
    # crop every case to its nonzero region in parallel
    imgcrop.run_cropping(lists, overwrite_existing=override)
    shutil.copy(join(nnUNet_raw_data, task_string, "dataset.json"),
                cropped_out_dir)
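
The three crop variants above differ only in their path constants and directory helpers. Below is a minimal usage sketch, not taken from any of the listed projects: the import path (nnunet.experiment_planning.utils) and the task name are assumptions and must match your nnU-Net installation with nnUNet_raw_data / nnUNet_cropped_data configured.

# Hypothetical driver script; import location and task name are assumptions.
from nnunet.experiment_planning.utils import crop

if __name__ == "__main__":
    # Crop Task004_Hippocampus to its nonzero region with 8 worker processes.
    # The cropped arrays and a copy of dataset.json end up under
    # nnUNet_cropped_data/Task004_Hippocampus.
    crop("Task004_Hippocampus", override=False, num_threads=8)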
Code example #4
File: preprocessing.py    Project: whyuek/nnUNet
    def preprocess_test_case(self, data_files, target_spacing, seg_file=None, force_separate_z=None):
        # crop the test case (all modalities, plus the optional segmentation) to its nonzero region
        data, seg, properties = ImageCropper.crop_from_list_of_files(data_files, seg_file)

        # permute only the spatial axes according to transpose_forward; axis 0 (channels) stays first
        data = data.transpose((0, *[i + 1 for i in self.transpose_forward]))
        seg = seg.transpose((0, *[i + 1 for i in self.transpose_forward]))

        # resample to the target spacing and apply the configured intensity normalization
        data, seg, properties = self.resample_and_normalize(data, target_spacing, properties, seg,
                                                            force_separate_z=force_separate_z)
        return data.astype(np.float32), seg, properties
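
Example #5 below shows which constructor arguments GenericPreprocessor takes; combined with preprocess_test_case this gives roughly the following usage sketch. It is not taken from either project: the import path assumes nnU-Net v1, and the plans-file path and input file name are placeholders.

import pickle

from nnunet.preprocessing.preprocessing import GenericPreprocessor

# Placeholder plans file; point this at a real nnUNetPlans*_plans_*.pkl of your task.
with open("nnUNetPlansv2.1_plans_3D.pkl", "rb") as f:
    plans = pickle.load(f)

preprocessor = GenericPreprocessor(plans['normalization_schemes'],
                                   plans['use_mask_for_norm'],
                                   plans['transpose_forward'],
                                   plans['dataset_properties']['intensityproperties'])

data_files = ["case_0000.nii.gz"]  # one file per modality; placeholder name
target_spacing = plans['plans_per_stage'][0]['current_spacing']
data, seg, properties = preprocessor.preprocess_test_case(data_files, target_spacing)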
Code example #5
File: torch_predict.py    Project: sjjdd/torch_nnunet
    def generate_data(self, data_path):
        # form list of file names
        # expected_num_modalities = self.trainer.plans['num_modalities']

        # # check input folder integrity
        # case_ids = check_input_folder_and_return_caseIDs(data_path, expected_num_modalities)

        # all_files = subfiles(data_path, suffix=".nii.gz", join=False, sort=True)
        # list_of_lists = [[join(data_path, i) for i in all_files if i[:len(j)].startswith(j) and
        #               len(i) == (len(j) + 12)] for j in case_ids]

        # part_id = 0
        # num_parts = 1

        file_name = [
            '/home/zhiyuan/project/data/nnUNet_raw/nnUNet_raw_data/Task004_Hippocampus/imagesTs/hippocampus_392_0000.nii.gz'
        ]
        # crop the single test case to its nonzero bounding box
        data, seg, properties = ImageCropper.crop_from_list_of_files(file_name)
        plans = self.trainer.plans
        if plans.get('transpose_forward') is None or plans.get(
                'transpose_backward') is None:
            print(
                "WARNING! You seem to have data that was preprocessed with a previous version of nnU-Net. "
                "You should rerun preprocessing. We will proceed and assume that both transpose_foward "
                "and transpose_backward are [0, 1, 2]. If that is not correct then weird things will happen!"
            )
            plans['transpose_forward'] = [0, 1, 2]
            plans['transpose_backward'] = [0, 1, 2]

        # 1. From preprocessing.GenericPreprocessor.preprocess_test_case
        preprocessor = GenericPreprocessor(
            plans['normalization_schemes'], plans['use_mask_for_norm'],
            plans['transpose_forward'],
            plans['dataset_properties']['intensityproperties'])

        data = data.transpose(
            (0, *[i + 1 for i in preprocessor.transpose_forward]))
        seg = seg.transpose(
            (0, *[i + 1 for i in preprocessor.transpose_forward]))

        data, seg, properties = preprocessor.resample_and_normalize(
            data,
            plans['plans_per_stage'][self.trainer.stage]['current_spacing'],
            properties,
            seg,
            force_separate_z=None)

        output_data = data.astype(np.float32)

        return output_data, seg, properties
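
Both example #4 and example #5 build the transpose axes as (0, *[i + 1 for i in transpose_forward]), which reorders only the spatial axes and leaves the channel axis in place. A small self-contained numpy sketch (shapes made up for illustration):

import numpy as np

# Toy 4D array shaped (channels, x, y, z); the shape is made up for illustration.
data = np.zeros((2, 30, 40, 50))
transpose_forward = [2, 0, 1]  # example spatial permutation

# (0, *[i + 1 for i in transpose_forward]) == (0, 3, 1, 2):
# channels stay on axis 0, the spatial axes are reordered to (z, x, y).
data_t = data.transpose((0, *[i + 1 for i in transpose_forward]))
print(data_t.shape)  # -> (2, 50, 30, 40)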