Example #1
# Imports shared by all examples on this page. dh (data handling) and
# dt (distributed TensorFlow helpers) are project-local modules whose
# import paths are not shown here.
import time

import numpy as np
import tensorflow as tf
import dipy.core.gradients as dpg
from dipy.segment.mask import median_otsu


def measure_denoising(n_datasets, ip_file_path):
    data_ids = range(n_datasets)

    # Read the worker addresses and bring up the TensorFlow cluster.
    ip_ports, ips = dh.read_ip_file(ip_file_path)
    tf_cluster = dt.TfCluster(ip_ports[:])

    # Download each dataset and compute a brain mask from its mean b0 volume.
    datasets = []
    masks = []
    for data_id in data_ids:
        data, bvals_path, bvecs_path = dh.download(data_id)
        datasets.append(data)

        gtab = dpg.gradient_table(bvals_path, bvecs_path, b0_threshold=10)
        mean_b0 = np.mean(data[..., gtab.b0s_mask], -1)
        # mean_b0 is already 3D, so median_otsu needs no vol_idx here;
        # keyword arguments keep the call correct across dipy versions.
        _, mask = median_otsu(mean_b0,
                              median_radius=4,
                              numpass=2,
                              autocrop=False,
                              dilate=1)
        masks.append(mask)

    # Run the denoising across the cluster in a single distributed session.
    with tf.Session("grpc://%s" % tf_cluster.host) as sess:
        dt.parallel_denoise(sess, tf_cluster, datasets, masks, depth=1)
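
The masking step above can be tried in isolation. Below is a minimal standalone sketch on synthetic data; the array shapes and the b0 layout are illustrative assumptions, not taken from the example:

# Standalone sketch of the mean-b0 masking step, on synthetic data.
import numpy as np
from dipy.segment.mask import median_otsu

data = np.random.rand(32, 32, 16, 10)            # illustrative 4D dMRI volume
b0s_mask = np.array([True, True] + [False] * 8)  # assume first two are b0s
mean_b0 = np.mean(data[..., b0s_mask], -1)
_, mask = median_otsu(mean_b0, median_radius=4, numpass=2, dilate=1)
print(mask.shape)  # (32, 32, 16)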
Example #2
def measure_mask(n_datasets, ip_file_path):
    data_ids = range(n_datasets)

    # Read the worker addresses and bring up the TensorFlow cluster.
    ip_ports, ips = dh.read_ip_file(ip_file_path)
    tf_cluster = dt.TfCluster(ip_ports[:])

    # Keep only the b0 volumes of each dataset.
    filtered_data = []
    for data_id in data_ids:
        data, bvals_path, bvecs_path = dh.download(data_id)
        gtab = dpg.gradient_table(bvals_path, bvecs_path, b0_threshold=10)
        filtered_data.append(data[..., gtab.b0s_mask])

    # Average the b0 volumes across the cluster.
    with tf.Session("grpc://%s" % tf_cluster.host) as sess:
        print("\nMean\n")
        dt.parallel_mean(sess, tf_cluster, filtered_data)
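
The `gtab.b0s_mask` indexing keeps only the b0 volumes along the last axis. A small self-contained sketch; the b-values and shapes below are illustrative assumptions:

# Sketch of b0 filtering with a synthetic gradient table.
import numpy as np
import dipy.core.gradients as dpg

bvals = np.array([0, 0, 1000, 1000, 2000])
bvecs = np.zeros((5, 3))
bvecs[2:, 0] = 1.0  # unit vectors for the diffusion-weighted volumes
gtab = dpg.gradient_table(bvals, bvecs, b0_threshold=10)

data = np.random.rand(8, 8, 8, 5)
print(data[..., gtab.b0s_mask].shape)  # (8, 8, 8, 2): only the two b0 volumes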
Example #3
def end_to_end(n_datasets, ip_file_path, stride=8):
    time_total_start = time.time()

    data_ids = range(n_datasets)

    ip_ports, ips = dh.read_ip_file(ip_file_path)
    tf_cluster = dt.TfCluster(ip_ports[:])

    # Process the datasets in blocks of `stride`, one distributed session per
    # block, resetting the default graph in between to free its memory (TF 1.x).
    for data_ids_block in [
            data_ids[i:i + stride] for i in range(0, len(data_ids), stride)
    ]:
        with tf.Session("grpc://%s" % tf_cluster.host) as sess:
            time_iteration = time.time()
            end_to_end_iteration(sess, tf_cluster, data_ids_block)
            print("Time iteration: %.3fs" % (time.time() - time_iteration))
        tf.reset_default_graph()

    print("Time overall: %.3fs" % (time.time() - time_total_start))
Example #4
def measure_model_building(n_datasets, ip_file_path, stride=9):
    all_data_ids = range(n_datasets)

    # Process the datasets in blocks of `stride`, rebuilding the cluster
    # handle for each block.
    for data_ids in [
            all_data_ids[i:i + stride]
            for i in range(0, len(all_data_ids), stride)
    ]:
        ip_ports, ips = dh.read_ip_file(ip_file_path)
        tf_cluster = dt.TfCluster(ip_ports[:])

        # Download each dataset, keep its gradient table, and compute a
        # brain mask from its mean b0 volume.
        datasets = []
        masks = []
        gtabs = []
        for data_id in data_ids:
            data, bvals_path, bvecs_path = dh.download(data_id)
            datasets.append(data)

            gtab = dpg.gradient_table(bvals_path, bvecs_path, b0_threshold=10)
            mean_b0 = np.mean(data[..., gtab.b0s_mask], -1)
            # mean_b0 is already 3D, so median_otsu needs no vol_idx here;
            # keyword arguments keep the call correct across dipy versions.
            _, mask = median_otsu(mean_b0,
                                  median_radius=4,
                                  numpass=2,
                                  autocrop=False,
                                  dilate=1)
            masks.append(mask)
            gtabs.append(gtab)

        # Time only the distributed model building itself.
        print("Start measurement")
        time_start = time.time()
        with tf.Session("grpc://%s" % tf_cluster.host) as sess:
            dt.parallel_modelbuilding(sess,
                                      tf_cluster,
                                      masks,
                                      datasets,
                                      gtabs,
                                      n_parts=64)
        print("Time iteration: %.3fs" % (time.time() - time_start))
Example #5
def measure_image_filtering(n_datasets, multiplicator, ip_file_path):
    data_ids = range(n_datasets)

    ip_ports, ips = dh.read_ip_file(ip_file_path)
    tf_cluster = dt.TfCluster(ip_ports)

    # Download each dataset and keep its bvals/bvecs paths alongside it.
    datasets = []
    path_sets = []
    for data_id in data_ids:
        data, bvals_path, bvecs_path = dh.download(data_id)
        datasets.append(data)
        print(datasets[-1].shape)  # debug output: shape of the downloaded volume
        path_sets.append([bvals_path, bvecs_path])

    # Filter all images across the cluster, split into n_parts work units.
    with tf.Session("grpc://%s" % tf_cluster.host) as sess:
        print("\nImage filtering\n")
        dt.parallel_image_filtering(sess,
                                    tf_cluster,
                                    datasets,
                                    path_sets,
                                    multiplicator,
                                    n_parts=288)
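
A driver sketch for the entry points above. The file name and the one-host:port-per-line format are assumptions about what `dh.read_ip_file` expects; neither is shown in the examples:

# Hypothetical driver; argument values and the ip-file format are assumptions.
if __name__ == "__main__":
    # e.g. ips.txt might contain lines such as "10.0.0.1:2222"
    measure_image_filtering(n_datasets=2, multiplicator=1.5,
                            ip_file_path="ips.txt")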