Example #1
0
def get_statistics_voids(h5_volume_dir, dataset_name, scale=2):
    """Label voids in a segmented volume, fit them, and write per-void stats.

    Keeps only voxels labeled 1 (voids), downsamples by ``scale``, labels
    connected components, offsets the labels by 1,000,000 so they cannot
    collide with fiber ids, fits every void, writes the statistics to
    ``<h5_volume_dir>/void_dictionary.txt``, and saves a combined
    fiber+void labeled volume back to HDF5.

    Parameters
    ----------
    h5_volume_dir : str
        Directory holding the HDF5 volume and the output text file.
    dataset_name : str
        Name of the HDF5 file/dataset to process.
    scale : int or float, optional
        Downsampling factor applied before labeling (default 2).
    """
    scale = float(scale)
    print("Calculating statistics voids")
    Volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    # Keep only void voxels (label 1); everything else becomes background.
    Volume[np.where(Volume != 1)] = 0

    # NOTE(review): no order=0 here, so the binary mask is spline-interpolated
    # before labeling — confirm this is intentional (other zooms use order=0).
    Volume = ndi.zoom(Volume, 1.0 / scale)
    Volume, num_voids = measure.label(Volume, return_num=True)
    # Offset void labels so they stay disjoint from fiber ids.
    Volume[np.where(Volume > 0)] += 1000000

    list_voids = fit_all_voids_parallel(Volume)
    # BUG FIX: the original wrapped this in an extra `for el in list_voids:`
    # loop that re-opened and rewrote the file once per void (and shadowed
    # `el`); the final file contents were identical — write it exactly once.
    with open(h5_volume_dir + "/void_dictionary.txt", "w") as f:
        for el in list_voids:
            if(el[-1] != -1):  # -1 sentinel marks a failed fit; skip it
                # Positions rescale by `scale`; volume (el[5]) by scale**3.
                f.write("{},{:.0f},{:.0f},{:.0f},{:.2f}, {:.3f}, {:.4f}, {:.4f}, {:.4f}\n".format(el[0], int(scale) * el[1].item(), int(scale) * el[2].item(), int(scale) * el[3].item(), int(scale) * el[4].item(), int(scale) * int(scale) * int(scale) * el[5], el[6], el[7], el[8]))

    # NOTE(review): upsampling uses a hard-coded factor 2, not `scale` —
    # mismatched whenever scale != 2; confirm intent.
    Volume = ndi.zoom(Volume, 2, order=0)

    # Trim the extra voxel the zoom round-trip can introduce on each axis.
    Volume = Volume[:-1, :-1, :-1]
    Volume_fibers = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    # Replace the void voxels (label 1) in the original with the new void ids.
    Volume_fibers[np.where(Volume_fibers == 1)] = Volume[np.where(Volume_fibers == 1)]

    tensors_io.save_volume_h5(Volume_fibers, name=dataset_name + '_labeled_voids', dataset_name=dataset_name + '_labeled_voids', directory=h5_volume_dir)

    try:
        read_dictionary_voids_volume(h5_volume_dir + "/void_statistics.txt")
    except Exception:
        # Plotting is best-effort; the statistics file was already written.
        print("Volume plotting is not possible. Plot Manually")
Example #2
0
def crop_volume(h5_volume_dir, dataset_name, dataset_name2, start=[600, 600, 80], window_size=100):
    """Crop matching sub-cubes from a label volume and a data volume.

    Reads both datasets from ``h5_volume_dir``, crops the same
    ``window_size`` cube starting at ``start`` from each, and saves the
    label crop as '<dataset_name>_cropped' and the (scaled, int16) data
    crop as 'volume_cropped'.
    """
    print("Cropping volume")
    x0, y0, z0 = start[0], start[1], start[2]
    x1, y1, z1 = x0 + window_size, y0 + window_size, z0 + window_size

    labels = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    # Report how many voxels carry label 1 before cropping.
    print(len(np.where(labels == 1)[0]))
    labels = labels[x0:x1, y0:y1, z0:z1]

    raw = tensors_io.read_volume_h5(dataset_name2, dataset_name2, h5_volume_dir)
    raw = raw[x0:x1, y0:y1, z0:z1]

    cropped_name = dataset_name + "_cropped"
    tensors_io.save_volume_h5(labels, name=cropped_name, dataset_name=cropped_name, directory=h5_volume_dir)
    # Scale the raw data into an int16 range before saving.
    tensors_io.save_volume_h5((raw * 256).astype(np.int16), name="volume_cropped", dataset_name="volume_cropped", directory=h5_volume_dir)
Example #3
0
def upsample_full_volume(h5_volume_dir, dataset_name, scale=2):
    """Upsample a volume by ``scale`` in 100-slice chunks and write it out.

    Works chunk-by-chunk along the last axis to bound the memory of the
    zoomed output; the first chunk creates the
    '<dataset_name>_full_resolution' dataset and later chunks append to it.

    Parameters
    ----------
    h5_volume_dir : str
        Directory holding the HDF5 volume.
    dataset_name : str
        Name of the HDF5 file/dataset to upsample.
    scale : int, optional
        Zoom factor, applied with nearest-neighbor interpolation (default 2).
    """
    print("Upsampling Volume")
    # PERF FIX: the original re-read the whole volume from disk on every
    # loop iteration; read it once and slice chunks from memory instead.
    Volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    slices = Volume.shape[-1]
    out_name = dataset_name + "_full_resolution"

    for slc in range(0, slices, 100):
        # order=0 (nearest neighbor) preserves integer labels exactly.
        chunk = ndi.zoom(Volume[..., slc: slc + 100], scale, order=0)

        if(slc == 0):
            tensors_io.save_volume_h5(chunk, directory=h5_volume_dir, name=out_name, dataset_name=out_name)
        else:
            tensors_io.append_volume_h5(chunk, directory=h5_volume_dir, name=out_name, dataset_name=out_name)
Example #4
0
def downsample_volume(h5_volume_dir, dataset_name, scale=2):
    """Downsample an HDF5 volume by ``scale`` and save it as '<name>_small'.

    Uses nearest-neighbor interpolation (order=0) so label values survive
    the zoom unchanged.
    """
    factor = 1.0 / float(scale)
    print("Downsampling volume")
    volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    print(volume.shape)
    volume = ndi.zoom(volume, factor, order=0)
    out_name = dataset_name + "_small"
    tensors_io.save_volume_h5(volume, name=out_name, dataset_name=out_name, directory=h5_volume_dir)
Example #5
0
def get_statistics_fibers(h5_volume_dir, fibers_name, scale=2):
    """Downsample the fiber volume, fit all fibers, and write their stats.

    Writes one CSV line per fitted fiber to 'dict_fibers.txt' in the
    current working directory; positional values are rescaled back to the
    original resolution.

    Parameters
    ----------
    h5_volume_dir : str
        Directory holding the HDF5 fiber volume.
    fibers_name : str
        Name of the HDF5 file/dataset with fiber labels.
    scale : int or float, optional
        Downsampling factor applied before fitting (default 2).
    """
    print("Downsampling and calculating statistics voids")
    scale = float(scale)
    V_fibers = tensors_io.read_volume_h5(fibers_name, fibers_name, h5_volume_dir)
    # Clear voxels labeled 1 — presumably voids (label 1 is treated as void
    # elsewhere in this file); TODO confirm.
    V_fibers[np.where(V_fibers == 1)] = 0

    # order=0 keeps integer fiber labels intact through the zoom.
    V_fibers = ndi.zoom(V_fibers, 1.0 / scale, order=0)

    list_fibers = fit_all_fibers_parallel(V_fibers)
    # FIX: manage the file with a context manager so it is closed even if a
    # malformed fit entry raises mid-write; iterate entries directly.
    with open("dict_fibers.txt", "w") as f:
        for el in list_fibers:
            # Center/radius/length rescale by `scale`; angles (el[4], el[5]
            # onward) as originally formatted.
            f.write("{},{:.0f},{:.0f},{:.0f},{:.2f},{:.2f},{:.0f},{:.0f}\n".format(el[0], scale * el[1][0], scale * el[1][1], scale * el[1][2], scale * el[2], scale * el[3], el[4], el[5]))
Example #6
0
def _window_starts(extent, step):
    """Return start offsets 0, step, 2*step, ... whose `step`-sized window
    fits strictly inside `extent` (matches `while sz + step < extent`)."""
    return list(range(0, extent - step, step))


def find_neighboring_fibers(h5_volume_dir, dataset_name, window_size=50):
    """Scan the volume in windows and record which fiber ids touch.

    Partitions the volume into non-overlapping ``window_size`` cubes, runs
    ``find_n`` on each to accumulate an id -> set-of-neighbor-ids map, and
    writes it to './neighbor_elements2.txt' as CSV lines
    'id,neighbor1,neighbor2,...,'.

    Parameters
    ----------
    h5_volume_dir : str
        Directory holding the HDF5 volume.
    dataset_name : str
        Name of the HDF5 file/dataset with labels.
    window_size : int, optional
        Edge length of each scanning cube (default 50).
    """
    print("Cropping volume")
    neighbor_dictionary = defaultdict(set)
    neighbor_overlap = []
    Volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    rows, cols, slices = Volume.shape
    print(Volume.shape)

    # FIX: the three copy-pasted while-loops are replaced by one helper.
    start_x = _window_starts(rows, window_size)
    start_y = _window_starts(cols, window_size)
    start_z = _window_starts(slices, window_size)

    num_partitions = len(start_z) * len(start_y) * len(start_x)
    counter = 0
    print("Starting Fitting")
    for x in start_x:
        for y in start_y:
            for z in start_z:
                print("Partition {} done out of {}".format(counter, num_partitions))
                temp_vol = Volume[x:x + window_size, y:y + window_size, z:z + window_size]
                # find_n mutates neighbor_dictionary/neighbor_overlap in place.
                find_n(temp_vol, neighbor_dictionary, neighbor_overlap)
                counter += 1

    # FIX: `with` guarantees the file is closed on any error.
    with open("./neighbor_elements2.txt", "w") as f:
        for fiber, neighbor_ids in neighbor_dictionary.items():
            f.write("{},".format(fiber))
            for it in neighbor_ids:
                f.write("{},".format(it))
            f.write("\n")
Example #7
0
def fit_long_fibers(h5_volume_dir, dataset_name):
    """Re-fit the edge fibers listed in 'fiber_dictionary_CORRECTED_EDGE.txt'.

    Reads edge-fiber ids (one integer per line) from
    'fiber_dictionary_CORRECTED_EDGE.txt' in the current working directory,
    re-fits them over the whole volume in one pass, and writes the results
    to '<h5_volume_dir>/fiber_dictionary_CORRECTED_EDGES.txt'.
    """
    print("Fitting Very Long Fibers")
    Volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    complete_fiber_set = set()
    edge_fiber_set = set()
    # FIX: context manager closes the file on any error; iterating the file
    # object directly avoids materializing all lines with readlines().
    with open("fiber_dictionary_CORRECTED_EDGE.txt") as f:
        for line in f:
            edge_fiber_set.add(int(line))
    print(edge_fiber_set)
    Volume = torch.from_numpy(Volume)
    # NOTE(review): edge_fiber_set={} passes an empty dict where other call
    # sites pass a set — harmless only if the callee just iterates/tests it.
    list_fibers = get_fiber_properties_post_processing(Volume, offset=[0, 0, 0], complete_fibers=complete_fiber_set, edge_fiber_set={}, fibers_to_label=edge_fiber_set)

    fiber_dictionary = dict(list_fibers)

    # FIX: write the output dictionary under a context manager as well.
    with open(h5_volume_dir + "/fiber_dictionary_CORRECTED_EDGES.txt", "w") as f:
        for el in fiber_dictionary.values():
            f.write("{},{:.0f},{:.0f},{:.0f},{:.2f},{:.2f},{:.0f},{:.0f},{:.2f}\n".format(el[0], el[1], el[2], el[3], el[4], el[5], el[6], el[7], el[8]))
Example #8
0
def crop_at_a_specific_fiber_debug(h5_volume_dir, dataset_name, fiber_id=2):
    """Crop around one fiber and save diagnostic volumes for bad fits.

    Crops the fiber's bounding box, re-fits every fiber inside the crop,
    and saves three volumes: the crop itself ('_cropped_single'), fibers
    whose fit residual exceeds 200 ('_wrong_fibers'), and fibers whose fit
    returned the -1 failure sentinel ('_small_fibers').
    """
    print("Cropping volume")
    Volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    coords = np.where(Volume == fiber_id)
    centers = [int(coords[i].mean()) for i in range(3)]
    maxs = [int(coords[i].max()) for i in range(3)]
    mins = [int(coords[i].min()) for i in range(3)]

    # Tight bounding box of the target fiber, clamped to the volume bounds.
    distances = [maxs[i] - mins[i] for i in range(3)]
    window_size = [1.2 * distances[i] for i in range(3)]
    min_coords_xyz = [int(max(0, mins[i])) for i in range(3)]
    max_coords_xyz = [int(min(Volume.shape[i], maxs[i])) for i in range(3)]

    print("Window Size: {}".format(window_size))
    print(fiber_id, centers)
    Volume = Volume[min_coords_xyz[0]:max_coords_xyz[0], min_coords_xyz[1]:max_coords_xyz[1], min_coords_xyz[2]:max_coords_xyz[2]]

    Volume_wrong = np.zeros(Volume.shape)
    Volume_small = np.zeros(Volume.shape)

    print("Fitting")
    # BUG FIX: fiber_selected was unbound (NameError at the print below) when
    # the target fiber did not appear in the fit results; initialize it.
    fiber_selected = None
    list_fibers = fit_all_fibers_parallel(Volume)
    for k in list_fibers:
        if(k[0] == fiber_id):
            fiber_selected = k
        if(k[-1] > 200):  # large residual -> flag as "wrong" fit
            Volume_wrong[np.where(Volume == k[0])] = k[0]
            print(k)

        if(k[-1] == -1):  # -1 sentinel -> fitter rejected it (saved as "small")
            Volume_small[np.where(Volume == k[0])] = k[0]

    print(fiber_selected)
    tensors_io.save_volume_h5(Volume, name=dataset_name + "_cropped_single", dataset_name=dataset_name + "_cropped_single", directory=h5_volume_dir)
    tensors_io.save_volume_h5(Volume_wrong, name=dataset_name + "_wrong_fibers", dataset_name=dataset_name + "_wrong_fibers", directory=h5_volume_dir)
    # BUG FIX: the small-fibers volume was saved with the copy-pasted
    # dataset_name '..._wrong_fibers'; use '..._small_fibers' to match `name`.
    tensors_io.save_volume_h5(Volume_small, name=dataset_name + "_small_fibers", dataset_name=dataset_name + "_small_fibers", directory=h5_volume_dir)
Example #9
0
def crop_at_a_specific_fiber(h5_volume_dir, dataset_name, fiber_id=2):
    """Crop a padded bounding box around one fiber and save it to HDF5.

    Also prints the fiber's fitted properties and the window geometry.
    """
    print("Cropping volume")
    vol = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    voxel_idx = np.where(vol == fiber_id)

    centers = [int(axis.mean()) for axis in voxel_idx]
    maxs = [int(axis.max()) for axis in voxel_idx]
    mins = [int(axis.min()) for axis in voxel_idx]

    # Bounding box padded by 20 voxels per side, clamped to the volume.
    window_size = [hi - lo for hi, lo in zip(maxs, mins)]
    lo_corner = [int(max(0, lo - 20)) for lo in mins]
    hi_corner = [int(min(dim, hi + 20)) for dim, hi in zip(vol.shape, maxs)]

    tensor_vol = torch.from_numpy(vol)
    properties = get_fiber_properties_post_processing(tensor_vol, offset=[0, 0, 0], complete_fibers={}, fibers_to_label={fiber_id})
    vol = tensor_vol.numpy()

    print(properties)
    print("Window Size: {}".format(window_size))
    print(fiber_id, centers)

    cropped = vol[lo_corner[0]:hi_corner[0], lo_corner[1]:hi_corner[1], lo_corner[2]:hi_corner[2]]
    out_name = dataset_name + "_cropped_single"
    tensors_io.save_volume_h5(cropped, name=out_name, dataset_name=out_name, directory=h5_volume_dir)
Example #10
0
def _write_fiber_dictionary(path, fiber_dictionary):
    """Write each fiber entry as one formatted CSV line to `path`."""
    with open(path, "w") as f:
        for el in fiber_dictionary.values():
            f.write("{},{:.0f},{:.0f},{:.0f},{:.2f},{:.2f},{:.0f},{:.0f},{:.2f}\n".format(el[0], el[1], el[2], el[3], el[4], el[5], el[6], el[7], el[8]))


def _half_step_starts(extent, window_size):
    """Window start offsets advancing by window_size // 2 (50% overlap)."""
    starts = []
    sz = 0
    while(sz + window_size < extent):
        starts.append(sz)
        # BUG FIX: the original used `window_size / 2`, which is a float in
        # Python 3; the float starts then break numpy's integer slicing.
        sz = sz + window_size // 2
    return starts


def get_whole_volume_statistics(h5_volume_dir, dataset_name, window_size=500, device=None):
    """Fit fiber properties over the whole volume in 50%-overlapping windows.

    Processes each window on `device`, accumulating complete fibers and
    edge fibers (fibers cut by a window boundary); checkpoints the fiber
    dictionary to '<h5_volume_dir>/fiber_dictionary.txt' after every
    window, then re-fits the remaining edge fibers over the full volume.

    Parameters
    ----------
    h5_volume_dir : str
        Directory holding the HDF5 volume and the output dictionary.
    dataset_name : str
        Name of the HDF5 file/dataset with fiber labels.
    window_size : int, optional
        Edge length of each processing cube (default 500).
    device : torch.device, optional
        Device for the fitting; defaults to cuda:0.
    """
    print("Getting Fibers Statistics")
    if(device is None):
        device = torch.device("cuda:0")
    fiber_dictionary = {}
    edge_fiber_set = set()
    complete_fiber_set = set()
    Volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    rows, cols, slices = Volume.shape
    print(Volume.shape)

    # FIX: the three copy-pasted start-offset loops collapse into one helper.
    start_x = _half_step_starts(rows, window_size)
    start_y = _half_step_starts(cols, window_size)
    start_z = _half_step_starts(slices, window_size)

    num_partitions = len(start_z) * len(start_y) * len(start_x)
    counter = 0
    print("Starting Fitting")

    for x in start_x:
        for y in start_y:
            for z in start_z:
                print("Partition {} done out of {}".format(counter, num_partitions))
                temp_vol = Volume[x:x + window_size, y:y + window_size, z:z + window_size]
                temp_vol = torch.from_numpy(temp_vol).to(device)
                list_fibers = get_fiber_properties_post_processing(temp_vol, [x, y, z], complete_fibers=complete_fiber_set, edge_fiber_set=edge_fiber_set, vol_dim=Volume.shape)

                for f_id, props in list_fibers.items():
                    # Last element is used as a 0/1 "cut by window edge" flag.
                    fiber_is_complete = 1 - props[-1]

                    if(fiber_is_complete):
                        fiber_dictionary[f_id] = props
                        complete_fiber_set.add(f_id)
                        # No longer an edge fiber once fully fitted.
                        edge_fiber_set.discard(f_id)
                    elif(f_id not in complete_fiber_set):
                        edge_fiber_set.add(f_id)
                        fiber_dictionary[f_id] = props
                # Write any relabeling done on-device back into the volume.
                Volume[x:x + window_size, y:y + window_size, z:z + window_size] = temp_vol.cpu().numpy()
                print("Found Complete Fibers: {}. Edge Fibers {} ".format(len(complete_fiber_set), len(edge_fiber_set)))
                counter = counter + 1

                # Checkpoint after every partition so a crash loses no work.
                _write_fiber_dictionary(h5_volume_dir + "/fiber_dictionary.txt", fiber_dictionary)
                del temp_vol
                torch.cuda.empty_cache()

    torch.cuda.empty_cache()
    print("Saving Partition")
    tensors_io.save_volume_h5(Volume, name=dataset_name, dataset_name=dataset_name, directory=h5_volume_dir)
    print("Fitting Very Long Fibers")
    Volume = torch.from_numpy(Volume)
    # Fibers spanning multiple windows get one final whole-volume fit.
    list_fibers = get_fiber_properties_post_processing(Volume, offset=[0, 0, 0], complete_fibers=complete_fiber_set, edge_fiber_set={}, fibers_to_label=edge_fiber_set)

    for f_id in list_fibers.keys():
        fiber_dictionary[f_id] = list_fibers[f_id]

    _write_fiber_dictionary(h5_volume_dir + "/fiber_dictionary.txt", fiber_dictionary)
Example #11
0
def crop_at_a_specific_fiber_neighbors(h5_volume_dir, dataset_name, neighbors_dir):
    """Crop around one fiber and print statistics for its neighbors.

    Picks the first fiber id > 800 from the neighbor dictionary, crops a
    padded bounding box around it, builds a small label volume
    (1 = the fiber, 2 = neighboring voids, 3 = neighboring fibers),
    prints neighbor length/angle statistics, and saves the crop to HDF5.
    """
    print("Cropping volume")
    neighbors = read_neighbor_dictionary(neighbors_dir)

    # NOTE(review): these dictionary paths are hard-coded relative to the
    # working directory, unlike the h5_volume_dir-based paths used elsewhere.
    fiber_dict = read_fiber_dictionary('h5_statistics/fiber_dictionary.txt')
    void_dictionary = read_fiber_dictionary('h5_statistics/void_dictionary2.txt')

    # Pick the first fiber id above 800 as the crop target.
    # NOTE(review): if no key exceeds 800, fiber_id is never bound and the
    # np.where below raises NameError — confirm inputs always satisfy this.
    for k in neighbors.keys():
        if k > 800:
            fiber_id = k
            list_of_neighbors = neighbors[k]
            break

    Volume = tensors_io.read_volume_h5(dataset_name, dataset_name, h5_volume_dir)
    coords = np.where(Volume == fiber_id)
    centers = [int(coords[i].mean()) for i in range(3)]
    maxs = [int(coords[i].max()) for i in range(3)]
    mins = [int(coords[i].min()) for i in range(3)]

    # Bounding box of the target fiber, padded by 20 voxels per side and
    # clamped to the volume bounds.
    distances = [maxs[i] - mins[i] for i in range(3)]
    window_size = [distances[i] for i in range(3)]
    min_coords_xyz = [int(max(0, mins[i] - 20)) for i in range(3)]
    max_coords_xyz = [int(min(Volume.shape[i], maxs[i] + 20)) for i in range(3)]


    Volume = Volume[min_coords_xyz[0]:max_coords_xyz[0], min_coords_xyz[1]:max_coords_xyz[1], min_coords_xyz[2]:max_coords_xyz[2]]
    Volume2 = np.zeros(Volume.shape)

    # Include the target fiber itself so it appears in the output labels.
    list_of_neighbors.append(fiber_id)
    void_list = []
    fiber_list = []
    for el in list_of_neighbors:
        idx = np.where(Volume == el)
        if(el == fiber_id):
            Volume2[idx] = 1  # the target fiber
        elif(el > 1000000):
            # Ids above 1,000,000 are voids (see the offset applied in
            # get_statistics_voids).
            Volume2[idx] = 2
            print("Void properties")
            if(el in void_dictionary.keys()):
                void_list.append(void_dictionary[el])
        else:
            Volume2[idx] = 3  # neighboring fiber
            fiber_list.append(fiber_dict[el])

    print("Fiber properties")
    print(fiber_dict[fiber_id])
    '''
    for i in fiber_list:
        print(i)

    print("Viods")
    for i in void_list:
        print(i)
    '''
    fiber_list = np.array(fiber_list)
    void_list = np.array(void_list)

    # Columns 5..7 of the fiber entries — presumably length, theta, phi
    # given the print order below; TODO confirm against the dictionary format.
    lengths = fiber_list[:, 5]
    thetas = fiber_list[:, 6]
    phis = fiber_list[:, 7]


    # Emit comma-separated rows for easy pasting into external tools.
    for i in lengths:
        print(i, end=",")
    print("")
    for i in thetas:
        print(i, end=",")
    print("")
    for i in phis:
        print(i, end=",")
    print("")
    print(lengths.mean())
    print(thetas.mean())
    print(phis.mean())
    print(void_list[:, 5].mean())

    # Mean void direction vector (columns 6..8), converted to in-plane
    # angle Txy and polar angle Tz in degrees.
    direction = np.array([void_list[:, 6].mean(), void_list[:, 7].mean(), void_list[:, 8].mean()])
    Txy = np.arctan2(direction[1], direction[0]) * 180 / np.pi
    if(Txy < 0):
        Txy = 180 + Txy  # fold into [0, 180)
    Tz = np.arccos(np.dot(direction, np.array([0, 0, 1])) / np.linalg.norm(direction, 2)) * 180 / np.pi
    print(Txy)
    print(Tz)
    tensors_io.save_volume_h5(Volume2, name=dataset_name + "_cropped_single", dataset_name=dataset_name + "_cropped_single", directory=h5_volume_dir)