def segment(folder, params):
    """Segment all image layers of one stack folder and log the elapsed time.

    Creates the glomeruli/kidney mask output folders and a log folder,
    runs `segment_layer` over every layer in parallel, then writes the
    wall-clock duration and thread count to a per-folder CSV log.
    """
    # Make sure all output and log directories exist before any worker writes.
    filelib.make_folders([
        params.inputfolder + '../segmented/masks/glomeruli/' + folder,
        params.inputfolder + '../segmented/masks/kidney/' + folder,
        params.inputfolder + '../log/' + folder
    ])

    start_time = time.time()

    # Collect this stack's image layers in a deterministic (sorted) order.
    layer_files = filelib.list_image_files(params.inputfolder + folder)
    layer_files.sort()

    params.folder = folder

    # Segment every layer in parallel worker processes.
    boost.run_parallel(process=segment_layer, files=layer_files, params=params,
                       procname='Segmentation of glomeruli')

    # Record timing and parallelism for this stack in the log folder.
    timing = pd.Series({
        'Segmentation': time.time() - start_time,
        'threads': params.max_threads
    })
    timing.to_csv(params.inputfolder + '../log/' + folder + 'Segmentation.csv',
                  sep='\t')
def segment(folder, params):
    """Segment every 10th layer of a stack folder, skipping finished stacks.

    NOTE(review): this redefines `segment` from earlier in the file — at
    import time this later definition wins; confirm which variant is intended.
    """
    # Ensure the outline and mask output directories exist.
    filelib.make_folders([params.inputfolder + '../segmented/outlines/' + folder,
                          params.inputfolder + '../segmented/masks/' + folder])

    # Sorted list of this stack's image layers.
    layer_files = filelib.list_image_files(params.inputfolder + folder)
    layer_files.sort()

    # Subsample: keep only every 10th layer.
    subsample = np.int_(np.arange(0, len(layer_files), 10))
    layer_files = np.array(layer_files)[subsample]

    # Skip stacks whose mask folder already holds one output per layer.
    done = filelib.list_image_files(params.inputfolder + '../segmented/masks/' + folder)
    if len(done) != len(layer_files):
        params.folder = folder
        # Segment the selected layers in parallel worker processes.
        boost.run_parallel(process=segment_layer, files=layer_files,
                           params=params,
                           procname='Segmentation of glomeruli')
# --- Pipeline driver: segment -> label -> quantify -> combine statistics ---
settingsfile = 'settings.csv'
# Load run parameters as a pandas Series from the tab-separated settings
# file. NOTE(review): Series.from_csv is a legacy pandas API (removed in
# modern pandas) — this script presumably targets an old pandas/Python 2
# stack; confirm before upgrading dependencies.
params = pd.Series.from_csv(settingsfile, sep='\t')
#list folders with stacks to be analyzed
folders = list_subfolders(params.inputfolder, subfolders=[])
params.max_threads = 30
#segment each stack
for folder in folders:
    segment(folder, params)
#label connected objects
# Labelling uses fewer threads than segmentation (25 vs 30) — presumably a
# memory trade-off; confirm.
params.max_threads = 25
boost.run_parallel(process=label_cells, files=folders, params=params, procname='Labelling')
#quantify the segmented data
boost.run_parallel(process=quantify, files=folders, params=params, procname='Quantification')
# Merge the per-stack statistics into one combined table.
filelib.combine_statistics(
    params.inputfolder + '../statistics/',
    params.inputfolder + '../statistics/statistics_combined.csv')
# Per-stack summary statistics over the combined table.
compute_stack_stat(
    params.inputfolder + '../statistics/statistics_combined.csv', params)
# NOTE(review): the following call is truncated in this chunk — its argument
# list continues past the visible source.
compute_size_distribution(
# Fallback driver (the matching `try:` is outside this chunk): rerun the
# pipeline without the labelling step and without overriding max_threads.
# NOTE(review): bare `except:` swallows every error, including
# KeyboardInterrupt/SystemExit — narrow it once the try body is known.
except:
    settingsfile = 'settings.csv'
    # Legacy pandas API; tab-separated settings file (see note in the
    # primary driver above).
    params = pd.Series.from_csv(settingsfile, sep = '\t')
    #list folders with stacks to be analyzed
    folders = list_subfolders(params.inputfolder, subfolders = [])
    #segment each stack
    for folder in folders:
        # Python 2 print statement — progress trace per stack.
        print folder
        segment(folder, params)
    #quantify the segmented data
    boost.run_parallel(process = quantify, files = folders, params = params, procname = 'Quantification')
    # Merge per-stack statistics. NOTE(review): here the combined CSV is
    # written one level ABOVE the statistics folder
    # ('../statistics_combined.csv'), unlike the other driver variant —
    # confirm which destination is intended.
    filelib.combine_statistics(params.inputfolder + '../statistics/', params.inputfolder + '../statistics_combined.csv')