Example #1
def single_gpu_inference(path, gpu, iteration, offset_folder):
    assert os.path.exists(path), path

    meta_graph = '/groups/saalfeld/home/papec/Work/my_projects/nnets/gunpowder-experiments/experiments/cremi-tf/unet_default/unet_checkpoint_%i' % iteration
    net_io_json = '/groups/saalfeld/home/papec/Work/my_projects/nnets/gunpowder-experiments/experiments/cremi-tf/unet_default/net_io_names.json'
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    offset_file = os.path.join(os.path.split(os.path.realpath(__file__))[0],
                               offset_folder,
                               'list_gpu_%i.json' % gpu)
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["affs"]
    input_shape = (84, 268, 268)
    output_shape = (56, 56, 56)
    prediction = TensorflowPredict(meta_graph,
                                   input_key=input_key,
                                   output_key=output_key)
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     path, path,
                     offset_list,
                     input_shape=input_shape,
                     output_shape=output_shape,
                     input_key='gray')
    t_predict = time.time() - t_predict

    with open(os.path.join(path, 't-inf_gpu%i.txt' % gpu), 'w') as f:
        f.write("Inference with gpu %i in %f s" % (gpu, t_predict))
Example #2
def single_gpu_inference(sample, gpu, iteration):
    raw_path = '/groups/saalfeld/saalfeldlab/larissa/data/cremi/cremi_warped_sample{0:}.n5/volumes'.format(
        sample)
    assert os.path.exists(
        raw_path), "Path to N5 dataset with raw data and mask does not exist"
    rf = z5py.File(raw_path, use_zarr_format=False)
    shape = rf['raw'].shape

    weight_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_checkpoint_{0:}'.format(
        iteration)
    inference_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_inference'
    net_io_json = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/net_io_names.json'

    out_file = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/'
    out_file = os.path.join(
        out_file,
        'prediction_cremi_warped_sample{0:}_{1:}.n5'.format(sample, iteration))
    assert os.path.exists(out_file)

    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["dist"]
    input_shape = (91, 862, 862)
    output_shape = (71, 650, 650)

    offset_file = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/' \
                  'offsets_{0:}_{1:}_x{2:}_y{3:}_z{4:}/list_gpu_{5:}.json'.format(
                      sample, iteration, output_shape[0], output_shape[1],
                      output_shape[2], gpu)
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)
    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_key=input_key,
                                   output_key=output_key)

    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     partial(clip_float_to_uint8,
                             float_range=(-1, 1),
                             safe_scale=False),
                     raw_path,
                     out_file,
                     offset_list,
                     input_key='raw',
                     input_shape_wc=input_shape,
                     output_shape_wc=output_shape,
                     target_keys=('syncleft_dist',),  # one-element tuple; ('syncleft_dist') is just a string
                     log_processed=os.path.join(
                         os.path.dirname(offset_file),
                         'list_gpu_{0:}_processed.txt'.format(gpu)))

    t_predict = time.time() - t_predict

    with open(
            os.path.join(os.path.dirname(offset_file),
                         't-inf_gpu{0:}.txt'.format(gpu)), 'w') as f:
        f.write("Inference with gpu {0:} in {1:} s\n".format(gpu, t_predict))
Example #3
def single_gpu_inference(path, data_eval, samples, gpu, iteration):
    weight_meta_graph = os.path.join(path,
                                     'unet_checkpoint_{0:}'.format(iteration))
    inference_meta_graph = os.path.join(path, 'unet_inference')
    net_io_json = os.path.join(path, 'net_io_names.json')
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    input_key = net_io_names["raw"]
    output_key = [
        net_io_names["pre_dist"], net_io_names["post_dist"],
        net_io_names["cleft_dist"]
    ]
    input_shape = (91, 862, 862)
    output_shape = (71, 650, 650)

    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_key=input_key,
                                   output_key=output_key)
    for k, de in enumerate(data_eval):
        for s in samples:
            print('{0:} ({1:}/{2:}), {3:}'.format(de, k, len(data_eval), s))
            raw_file = '/groups/saalfeld/saalfeldlab/larissa/data/cremieval/{0:}/{1:}.n5'.format(
                de, s)
            out_file = os.path.join(
                path, 'evaluation/{0:}/{1:}/{2:}.n5'.format(iteration, de, s))
            offset_file = os.path.join(out_file,
                                       'list_gpu_{0:}.json'.format(gpu))
            with open(offset_file, 'r') as f:
                offset_list = json.load(f)
            t_predict = time.time()  # reset the timer for each sample
            run_inference_n5(prediction,
                             preprocess,
                             partial(clip_float_to_uint8,
                                     safe_scale=False,
                                     float_range=(-1, 1)),
                             raw_file,
                             out_file,
                             offset_list,
                             input_shape=input_shape,
                             output_shape=output_shape,
                             target_keys=('pre_dist', 'post_dist', 'clefts'),
                             input_key='volumes/raw',
                             log_processed=os.path.join(
                                 out_file,
                                 'list_gpu_{0:}processed.txt'.format(gpu)))
            t_predict = time.time() - t_predict

            with open(
                    os.path.join(
                        os.path.dirname(offset_file),
                        't-inf_gpu_{0:}_{1:}.txt'.format(gpu, iteration)),
                    'w') as f:
                f.write("Inference with gpu %i in %f s\n" % (gpu, t_predict))
Example #4
def single_gpu_inference(path, network_key, gpu, iteration):
    assert os.path.exists(path), path

    net_top_folder = '/groups/saalfeld/home/papec/Work/my_projects/nnets/gunpowder-experiments/new_experiments'
    net_sub_folder = 'experiments_unet_mala_mask_glia_predict_glia'
    net_folder = os.path.join(net_top_folder, net_sub_folder)
    assert os.path.exists(net_folder), net_folder

    prefix = networks[network_key]['prefix']
    graph_weights = os.path.join(net_folder,
                                 '%s_checkpoint_%i' % (prefix, iteration))
    # we don't use inference model for dtu2
    if network_key == 'dtu2':
        graph_inference = os.path.join(net_folder, prefix)
    else:
        graph_inference = os.path.join(net_folder, '%s_inference' % prefix)
    net_io_json = os.path.join(net_folder, 'net_io_names.json')

    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    offset_file = './offsets/list_gpu_%i.json' % gpu
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["affs"]

    input_shape = networks[network_key]['input_shape']
    output_shape = networks[network_key]['output_shape']

    prediction = TensorflowPredict(graph_weights,
                                   graph_inference,
                                   input_key=input_key,
                                   output_key=output_key)

    target_key = 'predictions/affs_glia'
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     clip_float_to_uint8,
                     path,
                     path,
                     offset_list,
                     input_shape=input_shape,
                     output_shape=output_shape,
                     input_key='gray',
                     target_keys=target_key,
                     num_cpus=10,
                     channel_order=[list(range(13))])
    t_predict = time.time() - t_predict

    with open(os.path.join(path, 't-inf_gpu%i.txt' % gpu), 'w') as f:
        f.write("Inference with gpu %i in %f s" % (gpu, t_predict))
Example #5
def single_gpu_inference(raw_path,
                         out_path,
                         net_folder,
                         gpu,
                         iteration):

    assert os.path.exists(raw_path)
    assert os.path.exists(net_folder)
    rf = z5py.File(raw_path, use_zarr_format=False)
    shape = rf['data'].shape
    input_shape = (84, 268, 268)
    output_shape = (56, 56, 56)

    # the n5 file might exist already
    if not os.path.exists(out_path):
        f = z5py.File(out_path, use_zarr_format=False)
        f.create_dataset('affs_xy', shape=shape,
                         compressor='gzip',
                         dtype='float32',
                         chunks=output_shape)
        f.create_dataset('affs_z', shape=shape,
                         compressor='gzip',
                         dtype='float32',
                         chunks=output_shape)

    # make offset list
    get_offset_lists(shape, [gpu], './offsets', output_shape=output_shape)

    meta_graph = os.path.join(net_folder, 'unet_checkpoint_%i' % iteration)
    net_io_json = os.path.join(net_folder, 'net_io_names.json')
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    offset_file = './offsets/list_gpu_%i.json' % gpu
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["affs"]
    prediction = TensorflowPredict(meta_graph,
                                   input_key=input_key,
                                   output_key=output_key)
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     raw_path,
                     out_path,
                     offset_list,
                     input_shape_wc=input_shape,
                     output_shape_wc=output_shape)
    t_predict = time.time() - t_predict
    print("Running inference in %f s" % t_predict)
Example #6
def single_gpu_inference(sample, gpu, iteration):
    path = '/nrs/saalfeld/lauritzen/%s/workspace.n5/raw' % sample
    assert os.path.exists(path), path
    rf = z5py.File(path, use_zarr_format=False)
    shape = rf['gray'].shape
    weight_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_checkpoint_%i' % iteration
    inference_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_inference'
    net_io_json = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/net_io_names.json'

    out_file = '/nrs/saalfeld/heinrichl/test/lauritzen/%s/workspace.n5' % sample
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    # n.b. builtin hash() is randomized per interpreter session in Python 3, so the
    # offset folder name is only reproducible with a fixed PYTHONHASHSEED
    mhash = hash(path)
    offset_file = '/nrs/saalfeld/heinrichl/synapses/scott_offsets_{0:}_DTU2_inf/' \
                  'offsets_{1:}/list_gpu_{2:}.json'.format(sample, mhash, gpu)
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["dist"]
    input_shape = (91, 862, 862)
    output_shape = (71, 650, 650)
    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_key=input_key,
                                   output_key=output_key)
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     partial(threshold_cc,
                             thr=0.,
                             output_shape=output_shape,
                             ds_shape=shape),
                     path,
                     out_file,
                     offset_list,
                     input_shape_wc=input_shape,
                     output_shape_wc=output_shape,
                     target_keys=('syncleft_dist_DTU-2_{0:}'.format(iteration),
                                  'syncleft_cc_DTU-2_{0:}'.format(iteration)),
                     input_key='gray',
                     log_processed=os.path.join(
                         os.path.dirname(offset_file),
                         'list_gpu_{0:}_{1:}_processed.txt'.format(gpu, iteration)))
    t_predict = time.time() - t_predict

    with open(
            os.path.join(os.path.dirname(offset_file),
                         't-inf_gpu_{0:}_{1:}.txt'.format(gpu, iteration)),
            'w') as f:
        f.write("Inference with gpu %i in %f s\n" % (gpu, t_predict))
Example #7
def single_gpu_inference(gpu, iteration):
    raw_path = '/groups/saalfeld/saalfeldlab/larissa/data/fib25/grayscale_sub.h5'
    save_folder = '/nrs/saalfeld/heinrichl/segmentation/distance_thirdtest/fib25_sub_prediction_at_%i' % iteration

    meta_graph = '/nrs/saalfeld/heinrichl/segmentation/distance_thirdtest/unet_checkpoint_%i' % iteration
    net_io_json = '/nrs/saalfeld/heinrichl/segmentation/distance_thirdtest/net_io_names.json'
    offset_file = '/nrs/saalfeld/heinrichl/segmentation/distance_thirdtest/offsets/list_gpu_%i.json' % gpu
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    prediction = TensorflowPredict(meta_graph, net_io_names['raw'], net_io_names['dist'])
    t_predict = time.time()
    run_inference(prediction, preprocess, raw_path, save_folder, offset_list,
                  output_shape=(44,) * 3, input_shape=(132,) * 3,
                  rejection_criterion=reject_empty_batch, padding_mode='constant')
    t_predict = time.time() - t_predict

    with open(os.path.join(save_folder, 't-inf_gpu%i.txt' % gpu), 'w') as f:
        f.write("Inference with gpu %i in %f s" % (gpu, t_predict))
    prediction.stop()  # stop the prediction instance, not the class
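Example #7 skips blocks via rejection_criterion=reject_empty_batch. The predicate is not shown in the snippet; a plausible sketch is a check for blocks with no signal at all (the actual criterion in the source may differ).

import numpy as np

def reject_empty_batch_sketch(data):
    # reject blocks that are entirely zero, e.g. padding outside the imaged volume
    return np.sum(data) == 0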
Example #8
def single_gpu_inference(gpu, iteration, gpu_offset):
    # path to the raw data
    raw_path = '/nrs/saalfeld/sample_E/sample_E.n5'
    path_in_file = 'volumes/raw/s0'

    save_path = '/nrs/saalfeld/sample_E/sample_E.n5'

    net_folder = '/groups/saalfeld/home/papec/Work/my_projects/nnets/gunpowder-experiments/experiments/cremi-tf'
    graph_weights = os.path.join(net_folder,
                                 'unet_default/unet_checkpoint_%i' % iteration)
    graph_inference = os.path.join(net_folder, 'unet_default/unet_inference')
    net_io_json = os.path.join(net_folder, 'unet_default/net_io_names.json')
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    offset_file = './offset_lists/list_gpu_%i.json' % (gpu + gpu_offset, )
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_shape = (88, 808, 808)
    output_shape = (60, 596, 596)

    input_key = net_io_names["raw"]
    output_key = net_io_names["affs"]
    prediction = TensorflowPredict(graph_weights,
                                   graph_inference,
                                   input_key=input_key,
                                   output_key=output_key)
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     float_to_uint8,
                     raw_path,
                     save_path,
                     offset_list,
                     input_shape,
                     output_shape,
                     input_key=path_in_file,
                     target_keys=['volumes/predictions/full_affs'],
                     num_cpus=10,
                     channel_order=[list(range(12))],
                     log_processed='./processed_gpu_%i.txt' %
                     (gpu + gpu_offset))

    t_predict = time.time() - t_predict

    with open(
            os.path.join(save_path, 't-inf_gpu%i.txt' % (gpu + gpu_offset, )),
            'w') as f:
        f.write("Inference with gpu %i in %f s" % (gpu, t_predict))
Example #9
def single_gpu_inference(config, gpu):

    offset_file = os.path.join(
        config['meta_path'],
        'list_gpu_{0:}{1:}.json'.format(gpu,
                                        config['offset_list_name_extension']))
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)
    print(config['input_key'], config['output_key'])
    prediction = TensorflowPredict(config['weight_meta_graph'],
                                   config['inference_meta_graph'],
                                   input_key=config['input_key'],
                                   output_key=config['output_key'])
    if 'postprocess' in config:
        if config['postprocess'] == 'clip_float32_to_uint8_range_0_1':
            postprocess = clip_float32_to_uint8_range_0_1
        elif config['postprocess'] in ('clip_float32_to_uint8',
                                       'clip_float32_to_uint8_range'):  # todo: unify these two names
            postprocess = clip_float32_to_uint8
        else:
            # fail early instead of leaving `postprocess` unbound
            raise ValueError("unknown postprocess option: %s" % config['postprocess'])
    else:
        postprocess = clip_float32_to_uint8
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     postprocess,
                     config['raw_path'],
                     config['out_file'],
                     offset_list,
                     input_key=config['data_key'],
                     input_shape=config['input_shape'],
                     output_shape=config['output_shape'],
                     target_keys=config['target_keys'],
                     log_processed=os.path.join(
                         os.path.dirname(offset_file),
                         'list_gpu_{0:}{1:}_processed.txt'.format(
                             gpu, config['offset_list_name_extension'])))

    t_predict = time.time() - t_predict

    with open(
            os.path.join(os.path.dirname(offset_file),
                         't-inf_gpu{0:}.txt'.format(gpu)), 'w') as f:
        f.write("Inference with gpu {0:} in {1:} s\n".format(gpu, t_predict))
Example #10
def single_gpu_inference(sample, gpu, iteration):
    raw_path = '/groups/saalfeld/home/papec/Work/neurodata_hdd/cremi_warped/sample%s.n5' % sample
    assert os.path.exists(raw_path), raw_path
    out_file = '/groups/saalfeld/home/papec/Work/neurodata_hdd/cremi_warped/sample%s.n5' % sample
    assert os.path.exists(out_file), out_file

    net_folder = '/groups/saalfeld/home/papec/Work/my_projects/nnets/gunpowder-experiments/experiments/cremi-tf'
    graph_weights = os.path.join(net_folder,
                                 'unet_default/unet_checkpoint_%i' % iteration)
    graph_inference = os.path.join(net_folder, 'unet_default/unet_inference')
    net_io_json = os.path.join(net_folder, 'unet_default/net_io_names.json')
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    offset_file = './offsets_sample%s/list_gpu_%i.json' % (sample, gpu)
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["affs"]
    input_shape = (88, 808, 808)
    output_shape = (60, 596, 596)
    prediction = TensorflowPredict(graph_weights,
                                   graph_inference,
                                   input_key=input_key,
                                   output_key=output_key)
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     float_to_uint8,
                     raw_path,
                     out_file,
                     offset_list,
                     input_key='raw',
                     target_keys='predictions/full_affs',
                     input_shape_wc=input_shape,
                     output_shape_wc=output_shape,
                     channel_order=[list(range(12))])
    t_predict = time.time() - t_predict

    with open(os.path.join(out_file, 't-inf_gpu%i.txt' % gpu), 'w') as f:
        f.write("Inference with gpu %i in %f s" % (gpu, t_predict))
Example #11
def single_gpu_inference(path, gpu, iteration):

    assert os.path.exists(path), path
    out_file = '/data/heinrichl/sample_E.n5'
    weight_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_checkpoint_%i' % iteration
    inference_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_inference'
    net_io_json = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/net_io_names.json'

    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    offset_file = '/nrs/saalfeld/heinrichl/synapses/sampleE_DTU2_offsets_update/list_gpu_{0:}.json'.format(
        gpu)
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["dist"]
    input_shape = (91, 862, 862)
    output_shape = (71, 650, 650)
    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_key=input_key,
                                   output_key=output_key)
    t_predict = time.time()
    run_inference_n5(
        prediction,
        preprocess,
        path,
        out_file,
        offset_list,
        input_shape_wc=input_shape,
        output_shape_wc=output_shape,
        target_keys='syncleft_dist_DTU-2_{0:}'.format(iteration),
        input_key='volumes/raw/s0',
        log_processed='/nrs/saalfeld/heinrichl/synapses/sampleE_DTU2_offsets_update/'
                      'list_gpu_{0:}_processed.txt'.format(gpu))
    t_predict = time.time() - t_predict

    with open(os.path.join(out_file, 't-inf_gpu%i.txt' % gpu), 'w') as f:
        f.write("Inference with gpu %i in %f s" % (gpu, t_predict))
Example #12
def single_gpu_inference(path, gpu, iteration):
    assert os.path.exists(path), path

    net_folder = '/groups/saalfeld/home/papec/Work/my_projects/nnets/gunpowder-experiments/experiments/cremi-tf'
    graph_weights = os.path.join(net_folder,
                                 'unet_default/unet_checkpoint_%i' % iteration)
    graph_inference = os.path.join(net_folder, 'unet_default/unet_inference')
    net_io_json = os.path.join(net_folder, 'unet_default/net_io_names.json')
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    mhash = hashlib.md5(path.encode('utf-8')).hexdigest()
    offset_file = './offsets_%s/list_gpu_%i.json' % (mhash, gpu)
    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    input_key = net_io_names["raw"]
    output_key = net_io_names["affs"]
    input_shape = (88, 808, 808)
    output_shape = (60, 596, 596)
    prediction = TensorflowPredict(graph_weights,
                                   graph_inference,
                                   input_key=input_key,
                                   output_key=output_key)

    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     path,
                     path,
                     offset_list,
                     input_shape_wc=input_shape,
                     output_shape_wc=output_shape,
                     input_key='gray',
                     target_keys='predictions/full_affs',
                     full_affinities=True)
    t_predict = time.time() - t_predict

    with open(os.path.join(path, 't-inf_gpu%i.txt' % gpu), 'w') as f:
        f.write("Inference with gpu %i in %f s" % (gpu, t_predict))
Example #13
def single_gpu_inference(data_train, augmentation, data_eval, samples, gpu,
                         iteration):
    path = os.path.join(
        config_loader.get_config()["synapses"]["training_setups_path"],
        "data_and_augmentations/{0:}/{1:}".format(data_train, augmentation))
    weight_meta_graph = os.path.join(path,
                                     "unet_checkpoint_{0:}".format(iteration))
    inference_meta_graph = os.path.join(path, "unet_inference")
    net_io_json = os.path.join(path, "net_io_names.json")
    with open(net_io_json, "r") as f:
        net_io_names = json.load(f)

    input_key = net_io_names["raw"]
    output_key = [
        net_io_names["pre_dist"],
        net_io_names["post_dist"],
        net_io_names["cleft_dist"],
    ]
    input_shape = (91 * 40, 862 * 4, 862 * 4)
    output_shape = (71 * 40, 650 * 4, 650 * 4)

    prediction = TensorflowPredict(
        weight_meta_graph,
        inference_meta_graph,
        input_keys=input_key,
        output_keys=output_key,
    )
    for k, de in enumerate(data_eval):
        for s in samples:
            print("{0:} ({1:}/{2:}), {3:}".format(de, k, len(data_eval), s))
            raw_file = os.path.join(
                config_loader.get_config()["synapses"]["cremieval_path"],
                "{0:}/{1:}.n5".format(de, s))
            out_file = os.path.join(
                config_loader.get_config()["synapses"]["cremieval_path"],
                "data_and_augmentations/{0:}/{1:}/evaluation/{2:}/{3:}/{4:}.n5"
                .format(data_train, augmentation, iteration, de, s))
            offset_file = os.path.join(out_file,
                                       "list_gpu_{0:}.json".format(gpu))
            with open(offset_file, "r") as f:
                offset_list = json.load(f)
            t_predict = time.time()  # reset the timer for each sample
            run_inference_zarr(prediction,
                               preprocess,
                               partial(clip_float_to_uint8,
                                       safe_scale=False,
                                       float_range=(-1, 1)),
                               raw_file,
                               out_file,
                               offset_list,
                               input_shape_wc=input_shape,
                               output_shape_wc=output_shape,
                               target_keys=("pre_dist", "post_dist", "clefts"),
                               input_key="volumes/raw",
                               input_resolution=(40, 4, 4),
                               target_resolution=(40, 4, 4),
                               log_processed=os.path.join(
                                   out_file,
                                   "list_gpu_{0:}processed.txt".format(gpu)))
            t_predict = time.time() - t_predict

            with open(
                    os.path.join(
                        os.path.dirname(offset_file),
                        "t-inf_gpu_{0:}_{1:}.txt".format(gpu, iteration),
                    ),
                    "w",
            ) as f:
                f.write("Inference with gpu %i in %f s\n" % (gpu, t_predict))
Example #14
def single_gpu_inference(data_train, augmentation, data_eval, samples, gpu,
                         iteration):
    path = "/nrs/saalfeld/heinrichl/synapses/data_and_augmentations/{0:}/{1:}".format(
        data_train, augmentation)
    weight_meta_graph = os.path.join(path,
                                     "unet_checkpoint_{0:}".format(iteration))
    inference_meta_graph = os.path.join(path, "unet_inference")
    net_io_json = os.path.join(path, "net_io_names.json")
    with open(net_io_json, "r") as f:
        net_io_names = json.load(f)

    input_key = net_io_names["raw"]
    output_key = [
        net_io_names["pre_dist"],
        net_io_names["post_dist"],
        net_io_names["cleft_dist"],
    ]
    input_shape = (91, 862, 862)
    output_shape = (71, 650, 650)

    prediction = TensorflowPredict(
        weight_meta_graph,
        inference_meta_graph,
        input_key=input_key,
        output_key=output_key,
    )
    for k, de in enumerate(data_eval):
        for s in samples:
            print("{0:} ({1:}/{2:}), {3:}".format(de, k, len(data_eval), s))
            raw_file = "/groups/saalfeld/saalfeldlab/larissa/data/cremieval/{0:}/{1:}.n5".format(
                de, s)
            out_file = (
                "/nrs/saalfeld/heinrichl/synapses/data_and_augmentations/{0:}/{"
                "1:}/evaluation/{2:}/{3:}/{4:}.n5".format(
                    data_train, augmentation, iteration, de, s))
            offset_file = os.path.join(out_file,
                                       "list_gpu_{0:}.json".format(gpu))
            with open(offset_file, "r") as f:
                offset_list = json.load(f)
            t_predict = time.time()  # reset the timer for each sample
            run_inference_n5(
                prediction,
                preprocess,
                partial(clip_float_to_uint8,
                        safe_scale=False,
                        float_range=(-1, 1)),
                raw_file,
                out_file,
                offset_list,
                input_shape=input_shape,
                output_shape=output_shape,
                target_keys=("pre_dist", "post_dist", "clefts"),
                input_key="volumes/raw",
                log_processed=os.path.join(
                    out_file, "list_gpu_{0:}processed.txt".format(gpu)),
            )
            t_predict = time.time() - t_predict

            with open(
                    os.path.join(
                        os.path.dirname(offset_file),
                        "t-inf_gpu_{0:}_{1:}.txt".format(gpu, iteration),
                    ),
                    "w",
            ) as f:
                f.write("Inference with gpu %i in %f s\n" % (gpu, t_predict))
Example #15
def single_job_inference(job_no,
                         raw_data_path,
                         sigma,
                         raw_ds,
                         setup_path,
                         output_path=None,
                         factor=None,
                         min_sc=None,
                         max_sc=None,
                         float_range=(-1, 1),
                         safe_scale=False,
                         n_cpus=5):

    output_dir, out_file = get_output_paths(raw_data_path, setup_path,
                                            output_path)
    offset_file = os.path.join(out_file, "list_gpu_{0:}.json".format(job_no))
    if not os.path.exists(offset_file):
        return

    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    rf = zarr.open(raw_data_path, mode="r")
    shape_vc = rf[raw_ds].shape
    weight_meta_graph = os.path.join(setup_path,
                                     "blur_{0:}".format(float(sigma)))
    inference_meta_graph = os.path.join(setup_path,
                                        "blur_{0:}".format(float(sigma)))

    net_io_json = os.path.join(setup_path, "net_io_names.json")
    with open(net_io_json, "r") as f:
        net_io_names = json.load(f)

    # `size` and `voxel_size` (used below) are assumed to be module-level globals here
    shapes_file = os.path.join(
        setup_path, "shapes_steps_{0:}x{1:}x{2:}.json".format(*size))
    with open(shapes_file, "r") as f:
        shapes = json.load(f)
    input_shape_vc, output_shape_vc, chunk_shape_vc = \
        shapes["input_shape_vc"], shapes["output_shape_vc"], shapes["chunk_shape_vc"]

    input_key = net_io_names["raw_input"]
    network_output_keys = net_io_names["output"]
    dataset_target_keys = ["raw_blurred"]

    input_shape_wc = Coordinate(input_shape_vc) * voxel_size
    output_shape_wc = Coordinate(output_shape_vc) * voxel_size
    chunk_shape_wc = Coordinate(chunk_shape_vc) * voxel_size

    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_keys=input_key,
                                   output_keys=network_output_keys,
                                   has_trained_variables=False)

    t_predict = time.time()

    factor, scale, shift = get_contrast_adjustment(rf, raw_ds, factor, min_sc,
                                                   max_sc)

    run_inference_zarr_multi_crop(
        prediction,
        functools.partial(preprocess,
                          factor=1. / factor,
                          scale=scale,
                          shift=shift),
        functools.partial(clip_float_to_uint8,
                          float_range=float_range,
                          safe_scale=safe_scale),
        raw_data_path,
        out_file,
        offset_list,
        network_input_shapes_wc=[
            input_shape_wc,
        ],
        network_output_shape_wc=output_shape_wc,
        chunk_shape_wc=chunk_shape_wc,
        input_keys=[
            raw_ds,
        ],
        target_keys=dataset_target_keys,
        input_resolutions=[
            tuple(voxel_size),
        ],
        target_resolutions=[
            tuple(voxel_size),
        ] * len(dataset_target_keys),
        log_processed=os.path.join(
            os.path.dirname(offset_file),
            "list_gpu_{0:}_{1:}_processed.txt".format(job_no, sigma)),
        pad_value=int(round(-factor * (shift / scale))),
        num_cpus=n_cpus)

    t_predict = time.time() - t_predict
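The pad_value passed above inverts the contrast adjustment so that padded raw voxels land at 0 after preprocessing. A worked example, assuming preprocess computes data * (1 / factor) * scale + shift (the call passes factor=1./factor); the concrete numbers are hypothetical.

factor, scale, shift = 255., 2., -1.   # e.g. map uint8 [0, 255] to [-1, 1]
pad_value = int(round(-factor * (shift / scale)))
assert pad_value == 128                # 128 / 255 * 2 - 1 is approximately 0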
Example #16
def single_job_inference(job_no: int,
                         raw_data_path: str,
                         iteration: int,
                         raw_ds: str,
                         setup_path: str,
                         output_path: Optional[str] = None,
                         factor: Optional[int] = None,
                         min_sc: Optional[int] = None,
                         max_sc: Optional[int] = None,
                         float_range: Tuple[int, int] = (-1, 1),
                         safe_scale: bool = False,
                         n_cpus: int = 5) -> None:
    """
    Run an inference job.

    Args:
        job_no: Id of the inference job.
        raw_data_path: Path to the n5 container that contains the raw data.
        iteration: Training iteration to run inference for.
        raw_ds: Dataset in the n5 container (`raw_data_path`) holding the raw data.
        setup_path: Path containing the setup.
        output_path: N5 container to save the output to, autogenerated if None.
        factor: Factor to normalize the raw data by, inferred from the datatype if None.
        min_sc: Minimum intensity (mapped to -1).
        max_sc: Maximum intensity (mapped to 1).
        float_range: Range of output floats for conversion to uint8.
        safe_scale: If True, values are scaled such that all values within `float_range` fall
            within (0, 255) and are not clipped. If False, values at the lower end of
            `float_range` may be scaled to < 0 and then clipped to 0.
        n_cpus: Number of cpus to use per job.
    """
    sys.path.append(setup_path)
    import unet_template
    output_dir, out_file = get_output_paths(raw_data_path, setup_path,
                                            output_path, iteration)
    offset_file = os.path.join(out_file, "list_gpu_{0:}.json".format(job_no))
    if not os.path.exists(offset_file):
        return

    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    rf = zarr.open(raw_data_path, mode="r")
    shape_vc = rf[raw_ds].shape
    weight_meta_graph = os.path.join(
        setup_path, "unet_train_checkpoint_{0:}".format(iteration))
    inference_meta_graph = os.path.join(setup_path, "unet_inference")

    net_io_json = os.path.join(setup_path, "net_io_names.json")
    with open(net_io_json, "r") as f:
        net_io_names = json.load(f)

    shapes_file = os.path.join(
        setup_path,
        "shapes_steps{0:}.json".format(unet_template.steps_inference))
    with open(shapes_file, "r") as f:
        shapes = json.load(f)
    input_shape_vc, output_shape_vc, chunk_shape_vc = \
        shapes["input_shape_vc"], shapes["output_shape_vc"], shapes["chunk_shape_vc"]

    input_key = net_io_names["raw"]
    network_output_keys = []
    dataset_target_keys = []

    for label in unet_template.labels:
        network_output_keys.append(net_io_names[label.labelname])
        dataset_target_keys.append(label.labelname)

    voxel_size_input = unet_template.voxel_size_input
    voxel_size_output = unet_template.voxel_size

    input_shape_wc = Coordinate(input_shape_vc) * voxel_size_input
    output_shape_wc = Coordinate(output_shape_vc) * voxel_size_output
    chunk_shape_wc = Coordinate(chunk_shape_vc) * voxel_size_output

    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_keys=input_key,
                                   output_keys=network_output_keys)

    factor, scale, shift = get_contrast_adjustment(rf, raw_ds, factor, min_sc,
                                                   max_sc)
    t_predict = time.time()
    run_inference_zarr_multi_crop(
        prediction,
        functools.partial(preprocess,
                          factor=1. / factor,
                          scale=scale,
                          shift=shift),
        functools.partial(clip_float_to_uint8,
                          float_range=float_range,
                          safe_scale=safe_scale),
        raw_data_path,
        out_file,
        offset_list,
        network_input_shapes_wc=[
            input_shape_wc,
        ],
        network_output_shape_wc=output_shape_wc,
        chunk_shape_wc=chunk_shape_wc,
        input_keys=[
            raw_ds,
        ],
        target_keys=dataset_target_keys,
        input_resolutions=[
            tuple(voxel_size_input),
        ],
        target_resolutions=[
            tuple(voxel_size_output),
        ] * len(dataset_target_keys),
        log_processed=os.path.join(
            os.path.dirname(offset_file),
            "list_gpu_{0:}_{1:}_processed.txt".format(job_no, iteration)),
        pad_value=int(round(-factor * (shift / scale))),
        num_cpus=n_cpus)

    t_predict = time.time() - t_predict
    logging.info("completed {0:} blocks in {1:}s.".format(
        len(offset_list), t_predict))
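A hypothetical invocation of Example #16 (all paths are placeholders; the GPU is typically selected via CUDA_VISIBLE_DEVICES by whatever launches the job):

single_job_inference(job_no=0,
                     raw_data_path='/path/to/raw.n5',
                     iteration=500000,
                     raw_ds='volumes/raw',
                     setup_path='/path/to/setup',
                     n_cpus=5)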
Example #17
def single_gpu_inference(gpu, iteration, list_extension=''):
    raw_path = '/groups/saalfeld/saalfeldlab/FAFB00/v14_align_tps_20170818_dmg.n5/volumes/raw/'
    assert os.path.exists(raw_path), raw_path
    weight_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_checkpoint_%i' % iteration
    inference_meta_graph = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/unet_inference'
    net_io_json = '/nrs/saalfeld/heinrichl/synapses/cremi_all_0116_01/net_io_names.json'
    with open(net_io_json, 'r') as f:
        net_io_names = json.load(f)

    out_file = '/nrs/saalfeld/FAFB00/v14_align_tps_20170818_dmg.n5/volumes/predictions/synapses_dt'
    metadata_folder = '/nrs/saalfeld/heinrichl/fafb_meta/'
    offset_file = os.path.join(
        metadata_folder, 'list_gpu_{0:}{1:}.json'.format(gpu, list_extension))
    processed_file = os.path.join(
        metadata_folder,
        'list_gpu_{0:}_processed{1:}.txt'.format(gpu, list_extension))

    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    if os.path.exists(processed_file):
        curated_processed_file = curate_processed_list(processed_file)
        with open(curated_processed_file, 'r') as f:
            processed_list = json.load(f)[:-1]
            processed_list_set = set(tuple(coo) for coo in processed_list)
    else:
        processed_list_set = set()

    offset_list_set = set(tuple(coo) for coo in offset_list)

    if processed_list_set == offset_list_set:
        print("processing was complete")
        return
    assert processed_list_set < offset_list_set
    offset_list = [list(coo) for coo in offset_list_set - processed_list_set]
    input_key = net_io_names["raw"]
    output_key = net_io_names["dist"]
    input_shape = (91, 862, 862)
    output_shape = (71, 650, 650)
    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_key=input_key,
                                   output_key=output_key)
    t_predict = time.time()
    run_inference_n5(prediction,
                     preprocess,
                     partial(clip_float_to_uint8,
                             float_range=(-1, 1),
                             safe_scale=False),
                     raw_path,
                     out_file,
                     offset_list,
                     input_shape=input_shape,
                     output_shape=output_shape,
                     target_keys='s0',
                     input_key='s0',
                     log_processed=processed_file)
    t_predict = time.time() - t_predict

    with open(
            os.path.join(metadata_folder,
                         't-inf_gpu_{0:}{1:}.txt'.format(gpu, list_extension)),
            'w') as f:
        f.write("Inference with gpu %i in %f s\n" % (gpu, t_predict))
Example #18
def single_job_inference(job_no,
                         raw_data_path,
                         iteration,
                         raw_ds,
                         setup_path,
                         output_path=None,
                         factor=None,
                         min_sc=None,
                         max_sc=None,
                         float_range=(-1, 1),
                         safe_scale=False,
                         n_cpus=5):
    sys.path.append(setup_path)
    import setup_config
    output_dir, out_file = get_output_paths(raw_data_path, setup_path,
                                            output_path)
    offset_file = os.path.join(out_file, "list_gpu_{0:}.json".format(job_no))
    if not os.path.exists(offset_file):
        return

    with open(offset_file, 'r') as f:
        offset_list = json.load(f)

    rf = zarr.open(raw_data_path, mode="r")
    shape_vc = rf[raw_ds].shape
    weight_meta_graph = os.path.join(
        setup_path,
        "{0:}_train_checkpoint_{1:}".format(setup_config.network_name,
                                            iteration))
    inference_meta_graph = os.path.join(
        setup_path, "{0:}_inference".format(setup_config.network_name))

    net_io_json = os.path.join(
        setup_path, "{0:}_io_names.json".format(setup_config.network_name))
    with open(net_io_json, "r") as f:
        net_io_names = json.load(f)

    shapes_file = os.path.join(
        setup_path,
        "shapes_steps{0:}.json".format(setup_config.steps_inference))
    with open(shapes_file, "r") as f:
        shapes = json.load(f)
    input_shape_vc, output_shape_vc, chunk_shape_vc = \
        shapes["input_shape_vc"], shapes["output_shape_vc"], shapes["chunk_shape_vc"]

    input_key = net_io_names[setup_config.input_name]
    network_output_keys = []
    dataset_target_keys = []

    for out_name in setup_config.output_names:
        network_output_keys.append(net_io_names[out_name + "_predicted"])
        dataset_target_keys.append(out_name + "_predicted")

    voxel_size_input = setup_config.voxel_size
    voxel_size_output = setup_config.voxel_size

    input_shape_wc = Coordinate(input_shape_vc) * voxel_size_input
    output_shape_wc = Coordinate(output_shape_vc) * voxel_size_output
    chunk_shape_wc = Coordinate(chunk_shape_vc) * voxel_size_output

    prediction = TensorflowPredict(weight_meta_graph,
                                   inference_meta_graph,
                                   input_keys=input_key,
                                   output_keys=network_output_keys)

    t_predict = time.time()

    factor, scale, shift = get_contrast_adjustment(rf, raw_ds, factor, min_sc,
                                                   max_sc)

    run_inference_zarr_multi_crop(
        prediction,
        functools.partial(preprocess,
                          factor=1. / factor,
                          scale=scale,
                          shift=shift),
        functools.partial(clip_float_to_uint8,
                          float_range=float_range,
                          safe_scale=safe_scale),
        raw_data_path,
        out_file,
        offset_list,
        network_input_shapes_wc=[
            input_shape_wc,
        ],
        network_output_shape_wc=output_shape_wc,
        chunk_shape_wc=chunk_shape_wc,
        input_keys=[
            raw_ds,
        ],
        target_keys=dataset_target_keys,
        input_resolutions=[
            tuple(voxel_size_input),
        ],
        target_resolutions=[
            tuple(voxel_size_output),
        ] * len(dataset_target_keys),
        log_processed=os.path.join(
            os.path.dirname(offset_file),
            "list_gpu_{0:}_{1:}_processed.txt".format(job_no, iteration)),
        pad_value=int(round(-factor * (shift / scale))),
        num_cpus=n_cpus)

    t_predict = time.time() - t_predict