Example 1
from numpy import savez_compressed
from scipy import ndimage


def change_voxel_spacing(target=voxel_spacing):
    """
    Load the CAT08 data and resample it to the target isotropic voxel size
    (0.5 x 0.5 x 0.5 mm by default). The processed volumes are stored in
    ../processed_training.
    """

    dataset_dirs = [
        "../training/dataset0%d/image0%d.mhd" % (idx, idx) for idx in range(8)
    ]

    for idx in range(8):
        print("\nprocessing dataset0" + str(idx))

        scan, _, spacing = load_itk(dataset_dirs[idx])
        spacing = spacing[::-1]
        print("spacing: " + str(spacing))

        # resample to the target voxel spacing (0.5 mm isotropic by default)
        x_zoom, y_zoom, z_zoom = spacing / target
        print("initial shape: " + str(scan.shape))
        scan = ndimage.zoom(scan, (x_zoom, y_zoom, z_zoom))
        print("final shape: " + str(scan.shape))

        # save the resampled volume as a compressed .npz file
        savez_compressed("../processed_training/%d.npz" % idx, scan)
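Every snippet in this listing calls a load_itk helper that is not shown. A minimal sketch of such a helper, assuming SimpleITK is installed, is given below; note that the axis ordering of the returned origin and spacing varies between the repositories these examples come from (Example 1, for instance, reverses the spacing right after loading), so treat the comments as an assumption rather than a guarantee.

import numpy as np
import SimpleITK as sitk

def load_itk(filename):
    # Read a .mhd/.raw pair and return the voxel array plus its metadata.
    itk_image = sitk.ReadImage(filename)
    scan = sitk.GetArrayFromImage(itk_image)    # numpy array indexed (z, y, x)
    origin = np.array(itk_image.GetOrigin())    # world origin in mm, (x, y, z)
    spacing = np.array(itk_image.GetSpacing())  # voxel spacing in mm, (x, y, z)
    return scan, origin, spacing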
Example 2
def generate_figure(idx):
    scan, origin, spacing = load_itk("./training/dataset0%d/image0%d.mhd" %
                                     (idx, idx))

    fig = plt.figure(figsize=(15, 8))
    ax = fig.add_subplot(111, projection='3d')
    ax.set_xlabel('X Label')
    ax.set_ylabel('Y Label')
    ax.set_zlabel('Z Label')

    for image_id in range(4):
        path = "./training/dataset0%d/vessel%s/reference.txt" % (idx,
                                                                 str(image_id))
        print(path)
        points = load_points(path)
        X, Y, Z = points[:, 0], points[:, 1], points[:, 2]
        X, Y, Z = X / spacing[1], Y / spacing[1], Z / spacing[0]
        ax.scatter(X, Y, Z, s=0.3, c=np.arange(len(points)))

    plt.axis("square")
    plt.savefig("./visualizations/" + str(idx) + ".png")
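generate_figure additionally relies on a load_points helper. Assuming the CAT08 reference.txt files are plain whitespace-separated tables whose first three columns are x, y and z coordinates, a minimal sketch is:

import numpy as np

def load_points(path):
    # Each row of reference.txt is expected to hold "x y z ..." values.
    return np.loadtxt(path)

With a helper like this in place, calling generate_figure(idx) for idx in range(8) writes one centerline scatter plot per CAT08 dataset into ./visualizations.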
Example 3
def get_batch(dataset_idx, vessel_idx, batch_size=32):
    """
    :param dataset_idx: dataset to use
    :param vessel_idx: vessel to use
    :param batch_size: number of samples to draw for the batch
    :return: a batch of data from the given dataset and vessel
    """

    print("-", end="")
    reference_points = load_reference_points("./preprocessing/reference_directions.txt")
    probs, radii, directions, input_data = [], [], [], []

    points_path = join(training_dir, "dataset0%d/vessel%s/reference.txt" % (dataset_idx, str(vessel_idx)))
    points = load_points(points_path)

    image, _, _ = load_itk(join(training_dir, "dataset0%d/image0%d.mhd" % (dataset_idx, dataset_idx)))
    idxs = np.random.randint(300, len(points) - 300, batch_size)
    for idx in idxs:
        radius, direction = create_sample(idx, points, reference_points)

        point = world_to_voxel(points[idx, :3])
        patch = segment_image(image, point).copy()

        if patch.shape == (19, 19, 19):
            input_data.append(patch)
            probs.append(1.)
            radii.append(radius)
            directions.append(direction)

    input_data = np.asarray(input_data).reshape(-1, 19, 19, 19, 1)
    radii = np.asarray(radii).reshape(-1, 1)
    directions = np.asarray(directions).reshape(-1, 500)
    probs = np.asarray(probs).reshape(-1, 1)
    # print(input_data.shape, radii.shape, directions.shape, probs.shape)

    return input_data, [probs, radii, directions]
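Because get_batch returns the patches together with three targets, it plugs naturally into a network with three output heads. A hypothetical training step (the model and the project helpers are assumed to exist elsewhere):

x, y = get_batch(dataset_idx=0, vessel_idx=2, batch_size=32)
# x: (N, 19, 19, 19, 1) patches; y: [probs (N, 1), radii (N, 1), directions (N, 500)]
# N can be smaller than batch_size because patches with an unexpected shape are skipped.
loss = model.train_on_batch(x, y)  # hypothetical Keras model with three output heads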
Example 4
def process_image(image_path, annotations, nodule, non_nodule, nodule_label,
                  non_nodule_label):
    image, origin, spacing = load_itk(image_path)  # 512 512 119
    image_name = os.path.split(image_path)[1].replace('.mhd', '')

    subset_name = image_path.split('/')[-2]
    #SH_path = '/data2/jhkim/npydata/' + subset_name + '/' + image_name + '.npy'
    #label_name = '/data2/jhkim/npydata/' + subset_name + '/' + image_name + '.label.npy'
    SH_path = '/data2/npydata/' + subset_name + '/' + image_name + '.npy'
    label_name = '/data2/npydata/' + subset_name + '/' + image_name + '.label.npy'

    # calculate resize factor
    resize_factor = spacing / OUTPUT_SPACING
    new_real_shape = image.shape * resize_factor
    new_shape = np.round(new_real_shape)
    real_resize = new_shape / image.shape
    new_spacing = spacing / real_resize

    # the volume and its label map are replaced by precomputed .npy files;
    # the resize factors above are only used to derive new_spacing for world_2_voxel
    image = np.transpose(np.load(SH_path))
    label = np.transpose(np.load(label_name))

    # image = normalize(image)
    # image = zero_center(image)

    # padding
    offset = patch_size // 2

    non_pad = image
    non_label_pad = label

    non_pad = np.pad(non_pad, offset, 'constant', constant_values=np.min(non_pad))
    non_label_pad = np.pad(non_label_pad, offset, 'constant', constant_values=np.min(non_label_pad))

    image = np.pad(image, offset + (stride * move), 'constant', constant_values=np.min(image))
    label = np.pad(label, offset + (stride * move), 'constant', constant_values=np.min(label))

    indices = annotations[annotations['seriesuid'] == image_name].index

    nodule_list = []
    nodule_label_list = []
    for i in indices:
        row = annotations.iloc[i]
        world_coords = np.array([row.coordX, row.coordY, row.coordZ])

        coords = np.floor(world_2_voxel(world_coords, origin, new_spacing)) + offset + (stride * move)  # center
        patch_stride(image, coords, offset, nodule_list)  # x,y,z, xy,xz,yz, xyz ... get stride patch
        patch_stride(label, coords, offset, nodule_label_list, patch_flag=False)

    nodule_num = len(nodule_list)

    non_nodule_list = []
    non_nodule_label_list = []
    x_coords = non_pad.shape[0] - offset - 1
    y_coords = non_pad.shape[1] - offset - 1
    z_coords = non_pad.shape[2] - offset - 1

    while len(non_nodule_list) < 3 * nodule_num:
        rand_x = randint(offset, x_coords)
        rand_y = randint(offset, y_coords)
        rand_z = randint(offset, z_coords)

        coords = np.array([rand_x, rand_y, rand_z])

        get_patch(non_pad, coords, offset, non_nodule_list)
        get_patch(non_label_pad,
                  coords,
                  offset,
                  non_nodule_label_list,
                  patch_flag=False)

    nodule.extend(nodule_list)
    non_nodule.extend(non_nodule_list)
    nodule_label.extend(nodule_label_list)
    non_nodule_label.extend(non_nodule_label_list)

    print('nodule : ', np.shape(nodule))
    print('nodule_label : ', np.shape(nodule_label))
    print('non-nodule : ', np.shape(non_nodule))
    print('non-nodule_label : ', np.shape(non_nodule_label))
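This snippet and Example 5 both convert annotation coordinates with a world_2_voxel helper that is not shown. A minimal sketch, assuming the usual LUNA16-style convention of mapping world millimetre coordinates to voxel indices of the resampled volume:

import numpy as np

def world_2_voxel(world_coords, origin, spacing):
    # Offset by the scan origin, then divide by the voxel spacing.
    stretched_voxel_coords = np.absolute(world_coords - origin)
    return stretched_voxel_coords / spacing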
Example 5
def process_image(image_path, annotations, nodule, non_nodule, nodule_label, non_nodule_label):
    image, origin, spacing = load_itk(image_path)  # 512 512 119
    image_name = os.path.split(image_path)[1].replace('.mhd', '')

    subset_name = image_path.split('/')[-2]
    SH_path = '/data2/jhkim/npydata/' + subset_name + '/' + image_name + '.npy'
    label_name = '/data2/jhkim/npydata/' + subset_name + '/' + image_name + '.label.npy'

    # calculate resize factor
    resize_factor = spacing / OUTPUT_SPACING
    new_real_shape = image.shape * resize_factor
    new_shape = np.round(new_real_shape)
    real_resize = new_shape / image.shape
    new_spacing = spacing / real_resize

    image = np.transpose(np.load(SH_path))
    label = np.transpose(np.load(label_name))

    # image = normalize(image)
    # image = zero_center(image)

    # padding
    offset = patch_size // 2

    non_pad = image
    non_label_pad = label

    non_pad = np.pad(non_pad, offset, 'constant', constant_values=np.min(non_pad))
    non_label_pad = np.pad(non_label_pad, offset, 'constant', constant_values=np.min(non_label_pad))

    image = np.pad(image, offset + (stride * move), 'constant', constant_values=np.min(image))
    label = np.pad(label, offset + (stride * move), 'constant', constant_values=np.min(label))


    indices = annotations[annotations['seriesuid'] == image_name].index

    nodule_list = []
    nodule_label_list = []
    for i in indices:
        row = annotations.iloc[i]
        world_coords = np.array([row.coordX, row.coordY, row.coordZ])

        coords = np.floor(world_2_voxel(world_coords, origin, new_spacing)) + offset + (stride * move)  # center
        patch_stride(image, coords, offset, nodule_list)  # x,y,z, xy,xz,yz, xyz ... get stride patch
        patch_stride(label, coords, offset, nodule_label_list, patch_flag=False)

    nodule_num = len(nodule_list)

    non_nodule_list = []
    non_nodule_label_list = []
    x_coords = non_pad.shape[0] - offset - 1
    y_coords = non_pad.shape[1] - offset - 1
    z_coords = non_pad.shape[2] - offset - 1

    while len(non_nodule_list) < 3 * nodule_num:
        rand_x = randint(offset, x_coords)
        rand_y = randint(offset, y_coords)
        rand_z = randint(offset, z_coords)

        coords = np.array([rand_x, rand_y, rand_z])

        get_patch(non_pad, coords, offset, non_nodule_list)
        get_patch(non_label_pad, coords, offset, non_nodule_label_list, patch_flag=False)

    nodule.extend(nodule_list)
    non_nodule.extend(non_nodule_list)
    nodule_label.extend(nodule_label_list)
    non_nodule_label.extend(non_nodule_label_list)

    print('nodule : ', np.shape(nodule))
    print('nodule_label : ', np.shape(nodule_label))
    print('non-nodule : ', np.shape(non_nodule))
    print('non-nodule_label : ', np.shape(non_nodule_label))
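A hypothetical driver for process_image, assuming a LUNA16-style layout with a single annotations.csv and per-subset folders of .mhd files (all paths below are illustrative, not taken from the original code):

import glob
import pandas as pd

annotations = pd.read_csv('/data2/annotations.csv')  # hypothetical path
nodule, non_nodule, nodule_label, non_nodule_label = [], [], [], []

for image_path in sorted(glob.glob('/data2/subset0/*.mhd')):  # hypothetical path
    process_image(image_path, annotations, nodule, non_nodule,
                  nodule_label, non_nodule_label)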
Example 6
import glob
import os

import numpy as np
from scipy.ndimage import zoom

for folder in [
        r'D:\jakubicek\Rot_detection\data_3d\Data_raw_test',
        r'D:\jakubicek\Rot_detection\data_3d\Data_raw_train'
]:

    folder_save = folder.replace('data_3d', 'data_3d_' + str(output_size))

    os.makedirs(folder_save, exist_ok=True)

    file_names = glob.glob(folder + '/*.mhd', recursive=True)

    for file_num, file_name in enumerate(file_names):

        file_name_save = file_name.replace('data_3d', 'data_3d_' + str(output_size))
        file_name_save = file_name_save.replace('.mhd', '.npy')

        ct_scan, origin, spacing = load_itk(file_name)

        # per-axis zoom factor that maps the original shape to the target shape
        factor = output_size_v / ct_scan.shape

        ct_scan_resized = zoom(ct_scan, factor)

        np.save(file_name_save, ct_scan_resized)
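The loop above also expects a few module-level names that are not shown (load_itk as sketched after Example 1, plus the target output size). A sketch of the assumed configuration, with illustrative values:

import numpy as np

output_size = 64                                     # edge length used in the folder and file names
output_size_v = np.array([64, 64, 64], dtype=float)  # target shape; array-like so that
                                                     # output_size_v / ct_scan.shape broadcasts per axis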