Example #1
import json
import os

# precompute_offset_list, run_filemaking, offset_list_from_precomputed and
# script_generator_lsf are assumed to come from the surrounding project.
def main(config_file):
    with open(config_file) as f:
        config = json.load(f)

    assert os.path.exists(
        config['raw_path']
    ), "Path to N5 dataset with raw data and mask does not exist"
    assert os.path.exists(
        config['meta_path']), "Path to directory for meta data does not exist"
    precompute_offset_list(config['raw_path'],
                           config['output_shape'],
                           os.path.join(config['meta_path'],
                                        config['blocklist_file']),
                           mask_key=config['mask_keys'],
                           force_recomputation=config['force_recomputation'])
    run_filemaking(config['raw_path'], config['output_shape'],
                   config['out_file'], config['target_keys'],
                   config['data_key'], config)
    offset_list_from_precomputed(
        str(os.path.join(config['meta_path'],
                         config['blocklist_file'])), config['gpu_list'],
        config['meta_path'], config['offset_list_name_extension'])
    script_generator_lsf.write_scripts(config['gpu_list'])
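
For reference, main expects a JSON config file whose keys match the lookups above. A minimal sketch of such a file, written from Python with purely illustrative values (none of these paths or key names are taken from the original project):

import json

# Every key below mirrors a config[...] lookup in main(); all values are
# placeholders.
config = {
    "raw_path": "/path/to/raw.n5",
    "meta_path": "/path/to/meta",
    "output_shape": [71, 650, 650],
    "blocklist_file": "blocklist.json",
    "mask_keys": "mask",
    "force_recomputation": False,
    "out_file": "/path/to/output.n5",
    "target_keys": ["affs_xy", "affs_z"],
    "data_key": "gray",
    "gpu_list": [0, 1, 2, 3],
    "offset_list_name_extension": "_per_gpu",
}
with open("config.json", "w") as f:
    json.dump(config, f, indent=2)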
Example #2
import os
from concurrent.futures import ProcessPoolExecutor

import z5py

# precompute_offset_list, offset_list_from_precomputed and single_inference
# are assumed to come from the surrounding project.
def complete_inference(sample, gpu_list, iteration):
    path = '/nrs/saalfeld/lauritzen/%s/workspace.n5/raw' % sample
    assert os.path.exists(path), "Path to N5 dataset with raw data and mask does not exist"
    rf = z5py.File(path, use_zarr_format=False)
    assert 'gray' in rf, "Raw data not present in N5 dataset"
    assert 'mask' in rf, "Mask not present in N5 dataset"

    shape = rf['gray'].shape

    # create the datasets
    output_shape = (71, 650, 650)
    out_file = '/nrs/saalfeld/heinrichl/test/lauritzen/%s/workspace.n5' % sample
    if not os.path.exists(out_file):
        os.mkdir(out_file)

    f = z5py.File(out_file, use_zarr_format=False)
    # the n5 datasets might exist already, so guard the create calls as the
    # later examples do (create_dataset raises if the name is already taken)
    dist_key = 'syncleft_dist_DTU-2_{0:}'.format(iteration)
    if dist_key not in f:
        f.create_dataset(dist_key,
                         shape=shape,
                         compressor='gzip',
                         dtype='float32',
                         chunks=output_shape)
    cc_key = 'syncleft_cc_DTU-2_{0:}'.format(iteration)
    if cc_key not in f:
        f.create_dataset(cc_key,
                         shape=shape,
                         compressor='gzip',
                         dtype='uint64',
                         chunks=output_shape)


    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_folder = '/nrs/saalfeld/heinrichl/synapses/scott_offsets_{0:}_DTU2_inf/'.format(sample)
    if not os.path.exists(offset_folder):
        os.mkdir(offset_folder)
    offset_list = precompute_offset_list(path, output_shape, offset_folder)
    mhash = hash(path)
    offset_list_from_precomputed(offset_list, gpu_list, os.path.join(offset_folder, 'offsets_%i' % mhash))

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [pp.submit(single_inference, sample, gpu, iteration) for gpu in gpu_list]
        result = [t.result() for t in tasks]

    if all(result):
        print("All gpu's finished inference properly.")
    else:
        print("WARNING: at least one process didn't finish properly.")
Example #3
# Imports as in Example #2 (os, z5py, ProcessPoolExecutor); the helper
# functions again come from the surrounding project.
def complete_inference(path, gpu_list, iteration):

    assert os.path.exists(
        path), "Path to N5 dataset with raw data and mask does not exist"
    f = z5py.File(path, use_zarr_format=False)
    assert 'gray' in f, "Raw data not present in N5 dataset"
    assert 'mask' in f, "Mask not present in N5 dataset"

    shape = f['gray'].shape
    # create the datasets
    out_shape = (56, ) * 3

    # the n5 datasets might exist already
    if 'affs_xy' not in f:
        f.create_dataset('affs_xy',
                         shape=shape,
                         compressor='gzip',
                         dtype='float32',
                         chunks=out_shape)
    if 'affs_z' not in f:
        f.create_dataset('affs_z',
                         shape=shape,
                         compressor='gzip',
                         dtype='float32',
                         chunks=out_shape)

    # make the offset files that assign blocks to GPUs
    output_shape = (56, 56, 56)
    # generate offset lists with mask
    offset_list = precompute_offset_list(path, output_shape)
    mhash = hash(path)
    offset_folder = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'offsets_%i' % mhash)
    offset_list_from_precomputed(offset_list, gpu_list, offset_folder)

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [
            pp.submit(single_inference, path, gpu, iteration, offset_folder)
            for gpu in gpu_list
        ]
        result = [t.result() for t in tasks]

    if all(result):
        print("All gpu's finished inference properly.")
    else:
        print("WARNING: at least one process didn't finish properly.")
Example #4
# Imports as in Example #2, plus hashlib; helpers from the surrounding project.
def complete_inference(path, gpu_list, iteration):

    assert os.path.exists(
        path), "Path to N5 dataset with raw data and mask does not exist"
    f = z5py.File(path, use_zarr_format=False)
    assert 'gray' in f, "Raw data not present in N5 dataset"
    assert 'mask' in f, "Mask not present in N5 dataset"

    shape = f['gray'].shape

    # create the datasets
    output_shape = (60, 596, 596)

    # the n5 datasets might exist already
    if 'predictions/full_affs' not in f:

        if 'predictions' not in f:
            f.create_group('predictions')

        chunks = (3, ) + tuple(outs // 2 for outs in output_shape)
        aff_shape = (12, ) + shape
        f.create_dataset('predictions/full_affs',
                         shape=aff_shape,
                         compression='gzip',
                         dtype='float32',
                         chunks=chunks)

    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_list = precompute_offset_list(path, output_shape)
    mhash = hashlib.md5(path.encode('utf-8')).hexdigest()
    offset_list_from_precomputed(offset_list, gpu_list, './offsets_%s' % mhash)

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [
            pp.submit(single_inference, path, gpu, iteration)
            for gpu in gpu_list
        ]
        result = [t.result() for t in tasks]

    if all(result):
        print("All gpu's finished inference properly.")
    else:
        print("WARNING: at least one process didn't finish properly.")
Example #5

# Imports as in Example #2; networks is assumed to be a module-level dict
# mapping each network key to its metadata, including an 'output_shape'.
def complete_inference(path, network_key, gpu_list, iteration):

    assert os.path.exists(path), "Path to N5 dataset with raw data and mask does not exist"
    f = z5py.File(path, use_zarr_format=False)
    assert 'gray' in f, "Raw data not present in N5 dataset"
    assert 'masks/initial_mask' in f, "Mask not present in N5 dataset"

    shape = f['gray'].shape

    output_shape = networks[network_key]['output_shape']

    # create the datasets
    # the n5 datasets might exist already
    target_key = 'predictions/affs_glia'
    if target_key not in f:

        if 'predictions' not in f:
            f.create_group('predictions')

        if output_shape[0] > 30 and all(outs % 2 == 0 for outs in output_shape):
            chunks = (3,) + tuple(outs // 2 for outs in output_shape)
        else:
            chunks = (3,) + output_shape

        aff_shape = (13,) + shape
        f.create_dataset(target_key,
                         shape=aff_shape,
                         compression='gzip',
                         dtype='uint8',
                         chunks=chunks)

    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_list = precompute_offset_list(path, output_shape)
    offset_list_from_precomputed(offset_list, gpu_list, './offsets')

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [pp.submit(single_inference, path, network_key, gpu, iteration)
                 for gpu in gpu_list]
        [t.result() for t in tasks]
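
Unlike the earlier examples, this variant discards the worker results; the final list comprehension only serves to surface exceptions from the futures. A caller wanting the same all-finished report as before could end the function with the following sketch instead, assuming single_inference returns a truthy value on success:

result = [t.result() for t in tasks]  # inside the with-block, as before
if all(result):
    print("All GPUs finished inference properly.")
else:
    print("WARNING: at least one process didn't finish properly.")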