Example #1
def complete_inference(sample, gpu_list, iteration):
    path = '/nrs/saalfeld/lauritzen/%s/workspace.n5/raw' % sample
    assert os.path.exists(path), "Path to N5 dataset with raw data and mask does not exist"
    rf = z5py.File(path, use_zarr_format=False)
    assert 'gray' in rf, "Raw data not present in N5 dataset"
    assert 'mask' in rf, "Mask not present in N5 dataset"

    shape = rf['gray'].shape

    # create the datasets
    output_shape = (71, 650, 650)
    out_file = '/nrs/saalfeld/heinrichl/test/lauritzen/%s/workspace.n5' % sample
    if not os.path.exists(out_file):
        os.mkdir(out_file)

    f = z5py.File(out_file, use_zarr_format=False)
    # the n5 datasets might exist already

    dist_key = 'syncleft_dist_DTU-2_{0:}'.format(iteration)
    if dist_key not in f:
        f.create_dataset(dist_key,
                         shape=shape,
                         compression='gzip',
                         dtype='float32',
                         chunks=output_shape)
    cc_key = 'syncleft_cc_DTU-2_{0:}'.format(iteration)
    if cc_key not in f:
        f.create_dataset(cc_key,
                         shape=shape,
                         compression='gzip',
                         dtype='uint64',
                         chunks=output_shape)


    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_folder = '/nrs/saalfeld/heinrichl/synapses/scott_offsets_{0:}_DTU2_inf/'.format(sample)
    if not os.path.exists(offset_folder):
        os.mkdir(offset_folder)
    offset_list = precompute_offset_list(path, output_shape, offset_folder)
    mhash = hash(path)
    offset_list_from_precomputed(offset_list, gpu_list, os.path.join(offset_folder, 'offsets_%i' % mhash))

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [pp.submit(single_inference, sample, gpu, iteration) for gpu in gpu_list]
        result = [t.result() for t in tasks]

    if all(result):
        print("All GPUs finished inference properly.")
    else:
        print("WARNING: at least one process didn't finish properly.")
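
The helper offset_list_from_precomputed is called in every example here but never shown. A minimal sketch of what it might do, assuming it round-robins a precomputed block list over the available GPUs and writes one JSON offset list per GPU; the signature and file layout are inferred from how the examples call it, not confirmed:

import json
import os


def offset_list_from_precomputed(offset_list, gpu_list, target_folder,
                                 list_name_extension=''):
    # Accept either a path to a precomputed JSON block list or the list itself.
    if isinstance(offset_list, str):
        with open(offset_list) as f:
            offset_list = json.load(f)

    # Assign blocks to GPUs round-robin so every worker gets a similar load.
    per_gpu = {gpu: [] for gpu in gpu_list}
    for i, offset in enumerate(offset_list):
        per_gpu[gpu_list[i % len(gpu_list)]].append(offset)

    # Write one offset list per GPU; each worker later reads only its own file.
    if not os.path.exists(target_folder):
        os.makedirs(target_folder)
    for gpu, offsets in per_gpu.items():
        list_file = 'list_gpu_{0:}{1:}.json'.format(gpu, list_name_extension)
        with open(os.path.join(target_folder, list_file), 'w') as f:
            json.dump(offsets, f)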
Example #2
def complete_inference(path, gpu_list, iteration):

    assert os.path.exists(
        path), "Path to N5 dataset with raw data and mask does not exist"
    f = z5py.File(path, use_zarr_format=False)
    assert 'gray' in f, "Raw data not present in N5 dataset"
    assert 'mask' in f, "Mask not present in N5 dataset"

    shape = f['gray'].shape
    # create the datasets
    out_shape = (56, ) * 3

    # the n5 datasets might exist already
    if 'affs_xy' not in f:
        f.create_dataset('affs_xy',
                         shape=shape,
                         compression='gzip',
                         dtype='float32',
                         chunks=out_shape)
    if 'affs_z' not in f:
        f.create_dataset('affs_z',
                         shape=shape,
                         compression='gzip',
                         dtype='float32',
                         chunks=out_shape)

    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_list = precompute_offset_list(path, out_shape)
    mhash = hash(path)
    offset_folder = os.path.join(
        os.path.split(os.path.realpath(__file__))[0], 'offsets_%i' % mhash)
    offset_list_from_precomputed(offset_list, gpu_list, offset_folder)

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [
            pp.submit(single_inference, path, gpu, iteration, offset_folder)
            for gpu in gpu_list
        ]
        result = [t.result() for t in tasks]

    if all(result):
        print("All GPUs finished inference properly.")
    else:
        print("WARNING: at least one process didn't finish properly.")
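
precompute_offset_list is the second unshown helper. Below is a sketch under the assumption that it strides over the raw volume in steps of output_shape and keeps only blocks that intersect the mask; the mask handling, the clamping at the volume boundary, and the output file name (which Example #4 reads back as 'block_list_in_mask.json') are all assumptions:

import itertools
import json
import os

import z5py


def precompute_offset_list(path, output_shape, out_folder=None, mask_key='mask'):
    f = z5py.File(path, use_zarr_format=False)
    shape = f['gray'].shape

    # Walk the volume in steps of one output block per axis.
    ranges = [range(0, sh, outs) for sh, outs in zip(shape, output_shape)]
    offset_list = [list(off) for off in itertools.product(*ranges)]

    # Drop blocks whose (boundary-clamped) bounding box misses the mask.
    if mask_key in f:
        mask = f[mask_key]

        def block_in_mask(off):
            bb = tuple(slice(o, min(o + s, sh))
                       for o, s, sh in zip(off, output_shape, shape))
            return bool(mask[bb].any())

        offset_list = [off for off in offset_list if block_in_mask(off)]

    if out_folder is not None:
        with open(os.path.join(out_folder, 'block_list_in_mask.json'), 'w') as fp:
            json.dump(offset_list, fp)
    return offset_list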
Example #3
def complete_inference(path, gpu_list, iteration):

    assert os.path.exists(
        path), "Path to N5 dataset with raw data and mask does not exist"
    f = z5py.File(path, use_zarr_format=False)
    assert 'gray' in f, "Raw data not present in N5 dataset"
    assert 'mask' in f, "Mask not present in N5 dataset"

    shape = f['gray'].shape

    # create the datasets
    output_shape = (60, 596, 596)

    # the n5 datasets might exist already
    if 'predictions/full_affs' not in f:

        if 'predictions' not in f:
            f.create_group('predictions')

        chunks = (3, ) + tuple(outs // 2 for outs in output_shape)
        aff_shape = (12, ) + shape
        f.create_dataset('predictions/full_affs',
                         shape=aff_shape,
                         compression='gzip',
                         dtype='float32',
                         chunks=chunks)

    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_list = precompute_offset_list(path, output_shape)
    mhash = hashlib.md5(path.encode('utf-8')).hexdigest()
    offset_list_from_precomputed(offset_list, gpu_list, './offsets_%s' % mhash)

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [
            pp.submit(single_inference, path, gpu, iteration)
            for gpu in gpu_list
        ]
        result = [t.result() for t in tasks]

    if all(result):
        print("All GPUs finished inference properly.")
    else:
        print("WARNING: at least one process didn't finish properly.")
Example #4
def complete_inference(gpu_list, iteration):
    path = '/nrs/saalfeld/sample_E/sample_E.n5'
    assert os.path.exists(path), "Path to N5 dataset with raw data and mask does not exist"
    rf = z5py.File(path, use_zarr_format=False)
    assert 'volumes/raw/s0' in rf, "Raw data not present in N5 dataset"

    shape = rf['volumes/raw/s0'].shape

    # create the datasets
    output_shape = (71, 650, 650)
    out_file = '/data/heinrichl/sample_E.n5'
    if not os.path.exists(out_file):
        os.mkdir(out_file)

    f = z5py.File(out_file, use_zarr_format=False)
    # the n5 datasets might exist already
    key = 'syncleft_dist_DTU-2_{0:}'.format(iteration)
    if key not in f:
        f.create_dataset(key,
                         shape=shape,
                         compression='gzip',
                         dtype='float32',
                         chunks=(71, 325, 325))


    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_folder = '/nrs/saalfeld/heinrichl/synapses/sampleE_DTU2_offsets_update/'
    if not os.path.exists(offset_folder):
        os.mkdir(offset_folder)
    offset_list = os.path.join(offset_folder, 'block_list_in_mask.json')
    offset_list_from_precomputed(offset_list, gpu_list, offset_folder)

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [pp.submit(single_inference, path, gpu, iteration) for gpu in gpu_list]
        result = [t.result() for t in tasks]

    if all(result):
        print("All GPUs finished inference properly.")
    else:
        print("WARNING: at least one process didn't finish properly.")
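
single_inference, the per-GPU worker submitted to the ProcessPoolExecutor, is also not part of these snippets (its signature even varies between examples). A minimal sketch, assuming it pins one GPU via CUDA_VISIBLE_DEVICES, reads the offset list written for that GPU, runs the network block by block, and returns True on success; run_block is a placeholder for the actual prediction code:

import json
import os


def run_block(path, offset, iteration):
    # Placeholder for the actual block-wise network prediction.
    print('predicting block at', offset, 'with iteration', iteration)


def single_inference(path, gpu, iteration, offset_folder='.'):
    # Pin this worker to its assigned GPU before any framework is imported.
    os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu)

    # Read the offset list that offset_list_from_precomputed wrote for this GPU.
    list_file = os.path.join(offset_folder, 'list_gpu_{0:}.json'.format(gpu))
    with open(list_file) as f:
        offsets = json.load(f)

    # Return True only if every block finished, matching the all(result)
    # check in the examples above.
    try:
        for offset in offsets:
            run_block(path, offset, iteration)
    except Exception:
        return False
    return True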
Example #5
def complete_inference(path, network_key, gpu_list, iteration):

    assert os.path.exists(path), "Path to N5 dataset with raw data and mask does not exist"
    f = z5py.File(path, use_zarr_format=False)
    assert 'gray' in f, "Raw data not present in N5 dataset"
    assert 'masks/initial_mask' in f, "Mask not present in N5 dataset"

    shape = f['gray'].shape

    output_shape = networks[network_key]['output_shape']

    # create the datasets
    # the n5 datasets might exist already
    target_key = 'predictions/affs_glia'
    if target_key not in f:

        if 'predictions' not in f:
            f.create_group('predictions')

        if output_shape[0] > 30 and all(outs % 2 == 0 for outs in output_shape):
            chunks = (3,) + tuple(outs // 2 for outs in output_shape)
        else:
            chunks = (3,) + output_shape

        aff_shape = (13,) + shape
        f.create_dataset(target_key,
                         shape=aff_shape,
                         compression='gzip',
                         dtype='uint8',
                         chunks=chunks)

    # make the offset files that assign blocks to GPUs
    # generate offset lists with mask
    offset_list = precompute_offset_list(path, output_shape)
    offset_list_from_precomputed(offset_list, gpu_list, './offsets')

    # run multiprocessed inference
    with ProcessPoolExecutor(max_workers=len(gpu_list)) as pp:
        tasks = [pp.submit(single_inference, path, network_key, gpu, iteration)
                 for gpu in gpu_list]
        [t.result() for t in tasks]
Example #6
def prepare_inference(gpu_list):
    # path to the raw data
    raw_path = '/groups/saalfeld/saalfeldlab/FAFB00/v14_align_tps_20170818_dmg.n5/volumes/raw'
    assert os.path.exists(
        raw_path), "Path to N5 dataset with raw data and mask does not exist"
    rf = z5py.File(raw_path, use_zarr_format=False)
    shape = rf['s0'].shape

    # create the datasets
    out_shape = (71, 650, 650)
    out_file = '/nrs/saalfeld/FAFB00/v14_align_tps_20170818_dmg.n5'
    if not os.path.exists(out_file):
        os.mkdir(out_file)
    f = z5py.File(out_file, use_zarr_format=False)
    g1 = f.create_group('volumes')
    g2 = g1.create_group('predictions')
    g3 = g2.create_group('synapses_dt')
    g3.create_dataset('s0',
                      shape=shape,
                      compression='gzip',
                      level=6,
                      dtype='uint8',
                      chunks=out_shape)

    metadata_folder = '/nrs/saalfeld/heinrichl/fafb_meta/'
    assert os.path.exists(metadata_folder)
    offset_list_from_precomputed(
        os.path.join(metadata_folder, 'list_gpu_all_part2_missing.json'),
        gpu_list,
        metadata_folder,
        list_name_extension='_part2_missing')
    #offset_list_from_precomputed(os.path.join(metadata_folder, 'block_list_in_mask_ordered_part2_local.json'),
    #                             gpu_list_local, metadata_folder, list_name_extension='_part2')
    script_generator_lsf.write_scripts(gpu_list)
    script_generator_local.write_scripts(
        gpu_list,
        list(range(8)) * (len(gpu_list) // 8) + list(range(len(gpu_list) % 8)))
    script_generator_local.write_scripts_choose_gpu(gpu_list)
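
script_generator_lsf.write_scripts and script_generator_local.write_scripts are project-local helpers that are not shown either. A rough sketch of what the LSF variant might emit, assuming one submission script per GPU; the bsub options, script names, and worker entry point are purely illustrative:

import os
import stat


def write_scripts(gpu_list, script_folder='.'):
    for gpu in gpu_list:
        script = '\n'.join([
            '#!/bin/bash',
            # Request one GPU slot on the cluster; the options are illustrative.
            'bsub -n 1 -gpu "num=1" -o inference_gpu_{0:}.log \\'.format(gpu),
            '    python single_inference.py {0:}'.format(gpu),
            '',
        ])
        script_path = os.path.join(
            script_folder, 'submit_inference_gpu_{0:}.sh'.format(gpu))
        with open(script_path, 'w') as f:
            f.write(script)
        # Make the generated submission script executable.
        os.chmod(script_path, os.stat(script_path).st_mode | stat.S_IEXEC)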
Example #7
def main(config_file):
    with open(config_file) as f:
        config = json.load(f)

    assert os.path.exists(
        config['raw_path']
    ), "Path to N5 dataset with raw data and mask does not exist"
    assert os.path.exists(
        config['meta_path']), "Path to directory for meta data does not exist"
    precompute_offset_list(config['raw_path'],
                           config['output_shape'],
                           os.path.join(config['meta_path'],
                                        config['blocklist_file']),
                           mask_key=config['mask_keys'],
                           force_recomputation=config['force_recomputation'])
    run_filemaking(config['raw_path'], config['output_shape'],
                   config['out_file'], config['target_keys'],
                   config['data_key'], config)
    offset_list_from_precomputed(
        os.path.join(config['meta_path'], config['blocklist_file']),
        config['gpu_list'],
        config['meta_path'],
        config['offset_list_name_extension'])
    script_generator_lsf.write_scripts(config['gpu_list'])
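
Example #7 reads everything from a JSON config file. A hypothetical config covering the keys that main() accesses; all paths and values are made-up placeholders:

import json

config = {
    'raw_path': '/path/to/raw.n5',          # N5 dataset with raw data and mask
    'meta_path': '/path/to/meta',           # directory for offset/block lists
    'out_file': '/path/to/predictions.n5',  # N5 file for the outputs
    'blocklist_file': 'block_list_in_mask.json',
    'data_key': 'gray',
    'mask_keys': ['mask'],
    'target_keys': ['predictions/full_affs'],
    'output_shape': [71, 650, 650],
    'gpu_list': [0, 1, 2, 3],
    'offset_list_name_extension': '',
    'force_recomputation': False,
}

with open('inference_config.json', 'w') as f:
    json.dump(config, f, indent=2)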
Example #8
def prepare_inference(blocklist_file, gpu_list, metadata_folder):
    offset_list_from_precomputed(blocklist_file, gpu_list, metadata_folder)
    script_generator_lsf.write_scripts(gpu_list)
Example #9
def make_list():
    in_list = './block_list_in_mask.json'
    gpu_list = range(16)
    offset_list_from_precomputed(in_list, gpu_list, './offset_lists')