def make_relabel_scripts(path, n_jobs, block_shape, tmp_dir):
    """Write batch-job scripts that relabel 'segmentations/watershed_glia2'.

    Job files and intermediate state go under <tmp_dir>/tmp_files/tmp_relabel.
    Jobs are submitted through bsub with per-stage ETAs of [20, 10, 20] minutes.
    """
    from cluster_tools.relabel import make_batch_jobs

    cache_folder = os.path.join(tmp_dir, 'tmp_files', 'tmp_relabel')
    make_batch_jobs(path, 'segmentations/watershed_glia2', cache_folder,
                    block_shape, n_jobs, EXECUTABLE,
                    use_bsub=True, eta=[20, 10, 20])
def make_relabel_scripts(path, n_jobs, block_shape, tmp_dir):
    """Write batch-job scripts that relabel 'segmentations/watershed2'.

    NOTE(review): this redefines an earlier `make_relabel_scripts` in the same
    file (which targets the glia watershed) — confirm only one is intended.
    """
    # Make the repository root importable before pulling in cluster_tools.
    sys.path.append('../../..')
    from cluster_tools.relabel import make_batch_jobs

    cache_folder = os.path.join(tmp_dir, 'tmp_files', 'tmp_relabel')
    make_batch_jobs(path, 'segmentations/watershed2', cache_folder,
                    block_shape, n_jobs, EXECUTABLE,
                    use_bsub=True, eta=[5, 5, 5])
def make_ws_scripts(path, n_jobs, block_shape, tmp_dir):
    """Write batch-job scripts for the distance-transform-component watershed.

    Reads glia affinities and the minfilter mask from `path`; writes component
    and watershed outputs back into the same container.
    """
    from cluster_tools.dt_components import make_batch_jobs

    cache = os.path.join(tmp_dir, 'tmp_ws')
    make_batch_jobs(path,
                    aff_key='predictions/affs_glia',
                    out_key='segmentations/dt_components',
                    mask_key='masks/minfilter_mask',
                    cache_folder=cache,
                    n_jobs=n_jobs,
                    block_shape=block_shape,
                    ws_key='segmentations/watershed2',
                    executable=EXECUTABLE,
                    use_bsub=True)
def make_minfilter_scripts(path, n_jobs, chunks, filter_shape, block_shape, tmp_dir=None):
    """Write batch-job scripts that min-filter 'masks/initial_mask' into
    'masks/minfilter_mask' (both stored in `path`).

    Parameters
    ----------
    path : str
        Container holding the input and output mask datasets.
    n_jobs : int
        Number of batch jobs to generate.
    chunks, filter_shape, block_shape : sequence of int
        Chunking, filter and processing-block shapes forwarded to the jobs.
    tmp_dir : str
        Directory whose 'tmp_files/tmp_minfilter' subfolder caches job state.

    Raises
    ------
    ValueError
        If `tmp_dir` is not provided.
    """
    # BUG FIX: the original body referenced `tmp_dir`, which was not a
    # parameter — it would raise NameError unless a module-level global of
    # that name happened to exist. It is now an explicit keyword parameter
    # (default None keeps the old 5-argument call signature importable);
    # callers that relied on an implicit global must pass it explicitly.
    if tmp_dir is None:
        raise ValueError("tmp_dir must be provided")
    from cluster_tools.minfilter import make_batch_jobs

    cache_folder = os.path.join(tmp_dir, 'tmp_files', 'tmp_minfilter')
    make_batch_jobs(path, 'masks/initial_mask',
                    path, 'masks/minfilter_mask',
                    chunks, filter_shape, block_shape,
                    cache_folder,
                    n_jobs=n_jobs, executable=EXECUTABLE,
                    use_bsub=True, eta=5)
def make_graph_scripts(path, n_scales, n_jobs, n_threads, block_shape, tmp_dir):
    """Write batch-job scripts building the region graph over
    'segmentations/watershed_glia2' into <tmp_dir>/tmp_files/graph.n5.

    NOTE(review): the `n_threads` parameter is unused here — the merge step
    is hard-coded to 16 threads. Confirm this is intentional (a sibling
    variant in this file forwards `n_threads`).
    """
    from cluster_tools.graph import make_batch_jobs

    tmp_files = os.path.join(tmp_dir, 'tmp_files')
    graph_path = os.path.join(tmp_files, 'graph.n5')
    cache_folder = os.path.join(tmp_files, 'tmp_graph')
    # Jobs run locally here (use_bsub=False), unlike most siblings.
    make_batch_jobs(path, 'segmentations/watershed_glia2',
                    graph_path, cache_folder,
                    block_shape, n_scales, n_jobs, EXECUTABLE,
                    use_bsub=False, n_threads_merge=16,
                    eta=[30, 10, 60, 10])
def make_graph_scripts(path, n_scales, n_jobs, n_threads, block_shape, tmp_dir):
    """Write batch-job scripts building the region graph over
    'segmentations/watershed2' into <tmp_dir>/tmp_files/graph.n5.
    """
    # Make the repository root importable before pulling in cluster_tools.
    sys.path.append('../../..')
    from cluster_tools.graph import make_batch_jobs

    tmp_files = os.path.join(tmp_dir, 'tmp_files')
    make_batch_jobs(path, 'segmentations/watershed2',
                    os.path.join(tmp_files, 'graph.n5'),
                    os.path.join(tmp_files, 'tmp_graph'),
                    block_shape, n_scales, n_jobs, EXECUTABLE,
                    use_bsub=True, n_threads_merge=n_threads,
                    eta=[10, 10, 10, 10])
def make_cost_scripts(path, n_jobs, n_threads, tmp_dir):
    """Write batch-job scripts computing edge costs from accumulated features.

    Reads features.n5 and graph.n5 under <tmp_dir>/tmp_files and writes
    costs.n5 next to them.

    NOTE(review): the `path` parameter is unused here — confirm whether it
    was meant to feed into the cost computation.
    """
    from cluster_tools.costs import make_batch_jobs

    # Empty random-forest path — presumably disables RF-based costs; confirm.
    rf_path = ''
    tmp_files = os.path.join(tmp_dir, 'tmp_files')
    make_batch_jobs(os.path.join(tmp_files, 'features.n5'), 'features',
                    os.path.join(tmp_files, 'graph.n5'), 'graph',
                    rf_path,
                    os.path.join(tmp_files, 'costs.n5'), 'costs',
                    n_jobs,
                    os.path.join(tmp_files, 'tmp_costs'),
                    n_threads,
                    executable=EXECUTABLE, use_bsub=True, eta=5)
def make_projection_scripts(path, n_jobs, block_shape, tmp_dir):
    """Write batch-job scripts projecting the multicut node labeling
    ('node_labelings/multicut_glia') onto 'segmentations/watershed_glia2',
    producing 'segmentations/multicut_glia' in the same container.
    """
    from cluster_tools.label_projection import make_batch_jobs

    # Output chunks are half the processing block in every dimension.
    chunks = [size // 2 for size in block_shape]
    cache_folder = os.path.join(tmp_dir, 'tmp_files', 'tmp_projection')
    make_batch_jobs(path, 'segmentations/watershed_glia2',
                    path, 'segmentations/multicut_glia',
                    path, 'node_labelings/multicut_glia',
                    cache_folder,
                    block_shape, chunks, n_jobs,
                    executable=EXECUTABLE, use_bsub=True, eta=5)
def make_ws_scripts(path, aff_path, n_jobs, block_shape, tmp_dir):
    """Write batch-job scripts for the masked watershed.

    Reads glia affinities from `aff_path` and the minfilter mask from `path`;
    writes 'segmentations/watershed_glia2' into `path`.
    """
    from cluster_tools.masked_watershed import make_batch_jobs

    # Output chunks are half the processing block in every dimension.
    chunks = [size // 2 for size in block_shape]
    cache_folder = os.path.join(tmp_dir, 'tmp_files', 'tmp_ws')
    make_batch_jobs(aff_path, 'predictions/affs_glia',
                    path, 'masks/minfilter_mask',
                    path, 'segmentations/watershed_glia2',
                    cache_folder,
                    block_shape, chunks, n_jobs, EXECUTABLE,
                    use_bsub=True, n_threads_ufd=12,
                    eta=[180, 30, 30, 60])
def make_multicut_scripts(path, n_scales, n_jobs, n_threads, block_shape, tmp_dir):
    """Write batch-job scripts solving the multicut and storing the node
    labeling as 'node_labelings/multicut_glia' in `path`.

    NOTE(review): costs are read from tmp_mc/merged_graph.n5/s0 while the
    cache folder is 'tmp_m' (not 'tmp_mc') — confirm both paths are
    intentional and not typos.
    """
    from cluster_tools.multicut import make_batch_jobs

    tmp_files = os.path.join(tmp_dir, 'tmp_files')
    costs_path = os.path.join(tmp_files, 'tmp_mc', 'merged_graph.n5', 's0')
    make_batch_jobs(os.path.join(tmp_files, 'graph.n5'), 'graph',
                    costs_path, 'costs',
                    path, 'node_labelings/multicut_glia',
                    block_shape, n_scales,
                    os.path.join(tmp_files, 'tmp_m'),
                    n_jobs,
                    n_threads=n_threads, executable=EXECUTABLE,
                    use_bsub=True, eta=[90, 90, 180])
def make_projection_scripts(path, n_jobs, block_shape, tmp_dir, res_key):
    """Write batch-job scripts projecting 'node_labelings/<res_key>' onto
    'segmentations/watershed2', producing 'segmentations/<res_key>'.
    """
    # Make the repository root importable before pulling in cluster_tools.
    sys.path.append('../../..')
    from cluster_tools.label_projection import make_batch_jobs

    # Output chunks are half the processing block in every dimension.
    chunks = [size // 2 for size in block_shape]
    cache_folder = os.path.join(tmp_dir, 'tmp_files', 'tmp_projection')
    make_batch_jobs(path, 'segmentations/watershed2',
                    path, 'segmentations/%s' % res_key,
                    path, 'node_labelings/%s' % res_key,
                    cache_folder,
                    block_shape, chunks, n_jobs,
                    executable=EXECUTABLE, use_bsub=True, eta=5)
def make_multicut_scripts(path, n_scales, n_jobs, n_threads, block_shape, tmp_dir, res_key):
    """Write batch-job scripts solving the multicut and storing the node
    labeling as 'node_labelings/<res_key>' in `path`.
    """
    # Make the repository root importable before pulling in cluster_tools.
    sys.path.append('../../..')
    from cluster_tools.multicut import make_batch_jobs

    tmp_files = os.path.join(tmp_dir, 'tmp_files')
    make_batch_jobs(os.path.join(tmp_files, 'graph.n5'), 'graph',
                    os.path.join(tmp_files, 'costs.n5'), 'costs',
                    path, 'node_labelings/%s' % res_key,
                    block_shape, n_scales,
                    os.path.join(tmp_files, 'tmp_mc'),
                    n_jobs,
                    n_threads=n_threads, executable=EXECUTABLE,
                    use_bsub=True, eta=[5, 5, 15])
def make_feature_scripts(path, aff_path, n_jobs1, n_jobs2, n_threads, block_shape, tmp_dir):
    """Write batch-job scripts accumulating edge features from the glia
    affinities over 'segmentations/watershed_glia2', stored in
    <tmp_dir>/tmp_files/features.n5.

    `n_jobs1`/`n_jobs2` size the two feature stages; `n_threads` is the
    thread count of the second stage.
    """
    from cluster_tools.features import make_batch_jobs

    tmp_files = os.path.join(tmp_dir, 'tmp_files')
    make_batch_jobs(os.path.join(tmp_files, 'graph.n5'), 'graph',
                    os.path.join(tmp_files, 'features.n5'), 'features',
                    aff_path, 'predictions/affs_glia',
                    path, 'segmentations/watershed_glia2',
                    os.path.join(tmp_files, 'tmp_features'),
                    block_shape, n_jobs1, n_jobs2,
                    n_threads2=n_threads, executable=EXECUTABLE,
                    use_bsub=True, eta=[60, 15])