def create_and_save_single_section(sec_relevant_mfovs, sections_map_sec_num, layer_num, wafer_folder, out_ts_fname):
    """Build a Section from coordinate data and persist it as a tilespec JSON.

    `sections_map_sec_num` is either a list of per-mfov coordinate entries or a
    single full-image-coordinates path; the matching Section factory is chosen
    accordingly. `sec_relevant_mfovs` restricts the section to those mfovs
    (or None for all). The wafer folder is opened through PyFilesystem so the
    source may be any fs URL.
    """
    wafer_fs = fs.open_fs(wafer_folder)
    if isinstance(sections_map_sec_num, list):
        # TODO - not implemented yet
        factory = Section.create_from_mfovs_image_coordinates
    else:
        factory = Section.create_from_full_image_coordinates
    new_section = factory(sections_map_sec_num,
                          layer_num,
                          cur_fs=wafer_fs,
                          relevant_mfovs=sec_relevant_mfovs)
    new_section.save_as_json(out_ts_fname)
def load_tilespecs(ts_fname):
    """Load a tilespec JSON file and wrap it in a Section.

    The wafer and section numbers are parsed from the file's basename, which
    is expected to look like ``W<wafer>_..._Sec<sec>_...json`` (e.g.
    ``W01_Sec016_montaged.json``).

    Returns a ``(tilespec, Section)`` tuple.
    """
    base_name = os.path.basename(ts_fname)
    wafer_num = int(base_name.split('_')[0].split('W')[1])
    sec_num = int(base_name.split('Sec')[1].split('_')[0])
    with open(ts_fname, 'rt') as in_f:
        tilespec = ujson.load(in_f)
    return tilespec, Section.create_from_tilespec(
        tilespec, wafer_section=(wafer_num, sec_num))
def run_stitcher(args):
    """Stitch every section directory listed in ``args.sections_list_file``.

    Each line of the list file is a section directory containing a
    ``full_image_coordinates.txt``. A montaged tilespec is written to
    ``args.output_dir`` as ``W<wafer>_Sec<sec>_montaged.json``; sections whose
    output file already exists are skipped, so the run is resumable.
    Returns early (after printing a message) if any listed directory is
    missing.
    """
    # Make a list of all the relevant sections
    with open(args.sections_list_file, 'rt') as in_f:
        secs_dirs = in_f.readlines()
    # BUG FIX: the original comprehension iterated `dirnamename` while
    # referencing `dirname`, which raises NameError at runtime.
    secs_dirs = [dirname.strip() for dirname in secs_dirs]

    # Make sure the folders exist
    all_dirs_exist = True
    for sec_dir in secs_dirs:
        if not os.path.exists(sec_dir):
            print("Cannot find folder: {}".format(sec_dir))
            all_dirs_exist = False
    if not all_dirs_exist:
        print("One or more directories could not be found, exiting!")
        return

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    conf = None
    if args.conf_fname is not None:
        conf = Stitcher.load_conf_from_file(args.conf_fname)
    stitcher = Stitcher(conf)

    # BUG FIX: the original loop iterated `sec_dirs`, an undefined name
    # (NameError); the list built above is `secs_dirs`.
    for sec_dir in secs_dirs:
        # extract wafer and section# from directory name
        wafer_num, sec_num = sec_dir_to_wafer_section(sec_dir)
        out_ts_fname = os.path.join(
            args.output_dir, 'W{}_Sec{}_montaged.json'.format(
                str(wafer_num).zfill(2), str(sec_num).zfill(3)))
        if os.path.exists(out_ts_fname):
            # Already stitched on a previous run — skip.
            continue
        layer_num = get_layer_num(sec_num, args.initial_layer_num)
        print("Stitching {}".format(sec_dir))
        section = Section.create_from_full_image_coordinates(
            os.path.join(sec_dir, 'full_image_coordinates.txt'), layer_num)
        stitcher.stitch_section(section)
        # Save the tilespec
        section.save_as_json(out_ts_fname)
    del stitcher
def run_stitcher(args):
    """Stitch one section from an image-coordinates file and save the result.

    Builds the section from ``args.images_coords_file`` with the caller-chosen
    section number, stitches it with the configuration in ``args.conf_fname``,
    and writes the updated tilespec to ``args.output_json``.
    """
    # Make up a section number
    target_section = Section.create_from_full_image_coordinates(
        args.images_coords_file, args.sec_num)
    stitcher = Stitcher(Stitcher.load_conf_from_file(args.conf_fname))
    # Stitching updates the section's tile transformations in place.
    stitcher.stitch_section(target_section)
    # Save the transforms to file
    print('Writing output to: {}'.format(args.output_json))
    target_section.save_as_json(args.output_json)
def run_stitcher(args):
    """Stitch all tilespecs found in ``args.ts_dir`` (a PyFilesystem URL/path).

    Each input tilespec is loaded, stitched, and saved under
    ``args.output_dir`` with the same basename. Wafer/section numbers are
    parsed from basenames of the form ``W<wafer>_Sec<sec>...json``.
    Already-existing outputs are skipped, so the run is resumable.
    """
    #common.fs_create_dir(args.output_dir)
    conf = None
    if args.conf_fname is not None:
        conf = Stitcher.load_conf_from_file(args.conf_fname)
    stitcher = Stitcher(conf)

    # read the inpput tilespecs
    in_fs = fs.open_fs(args.ts_dir)
    in_ts_fnames = get_ts_files(
        in_fs,
        args.ts_dir)  #sorted(glob.glob(os.path.join(args.ts_dir, "*.json")))
    out_fs = fs.open_fs(args.output_dir)
    logger.report_event("Stitching {} sections".format(len(in_ts_fnames)),
                        log_level=logging.INFO)
    for in_ts_fname in in_ts_fnames:
        logger.report_event("Stitching {}".format(in_ts_fname),
                            log_level=logging.DEBUG)
        out_ts_fname = args.output_dir + "/" + fs.path.basename(in_ts_fname)
        # NOTE(review): `out_fs` is rooted at args.output_dir, yet `exists`
        # is handed a path that itself includes args.output_dir — this looks
        # like it should be just the basename; confirm against PyFilesystem
        # path semantics before relying on the skip logic.
        if out_fs.exists(out_ts_fname):
            continue
        print("Stitching {}".format(in_ts_fname))
        # Open relative to the input filesystem root.
        with in_fs.open(fs.path.basename(in_ts_fname), 'rt') as in_f:
            in_ts = ujson.load(in_f)
        # Parse wafer and section numbers out of the basename.
        wafer_num = int(
            fs.path.basename(in_ts_fname).split('_')[0].split('W')[1])
        sec_num = int(
            fs.path.basename(in_ts_fname).split('.')[0].split('_')[1].split(
                'Sec')[1])
        section = Section.create_from_tilespec(in_ts,
                                               wafer_section=(wafer_num,
                                                              sec_num))
        # Stitching updates the section's tile transformations in place.
        stitcher.stitch_section(section)
        # Save the tilespec
        section.save_as_json(out_ts_fname)
        # out_tilespec = section.tilespec
        # import json
        # with open(out_ts_fname, 'wt') as out_f:
        #     json.dump(out_tilespec, out_f, sort_keys=True, indent=4)
    del stitcher
def run_aligner(args):
    """Align the stack of sections listed in ``args.sections_list_file``.

    Each line of the list file names a tilespec JSON; wafer/section numbers
    are parsed from basenames of the form ``W<wafer>_Sec<sec>...json``.
    All sections are loaded, then aligned in place by StackAligner using the
    configuration from ``args.conf_fname``. Returns early (after printing a
    message) if any listed tilespec is missing.
    """
    # Make a list of all the relevant sections
    with open(args.sections_list_file, 'rt') as in_f:
        secs_ts_fnames = [fname.strip() for fname in in_f.readlines()]

    # Make sure the tilespecs exist
    all_files_exist = True
    for sec_ts_fname in secs_ts_fnames:
        if not os.path.exists(sec_ts_fname):
            print("Cannot find tilespec file: {}".format(sec_ts_fname))
            all_files_exist = False
    if not all_files_exist:
        print("One or more tilespecs could not be found, exiting!")
        return

    # Dead code removed: the original assigned hard-coded `out_folder` and
    # `conf_fname` locals that were never used — the configuration actually
    # comes from args.conf_fname.
    conf = StackAligner.load_conf_from_file(args.conf_fname)

    logger.report_event("Loading sections", log_level=logging.INFO)
    sections = []
    # TODO - Should be done in a parallel fashion
    for ts_fname in secs_ts_fnames:
        with open(ts_fname, 'rt') as in_f:
            tilespec = ujson.load(in_f)
        # Parse wafer and section numbers out of the basename.
        wafer_num = int(os.path.basename(ts_fname).split('_')[0].split('W')[1])
        sec_num = int(
            os.path.basename(ts_fname).split('.')[0].split('_')[1].split('Sec')
            [1])
        sections.append(
            Section.create_from_tilespec(tilespec,
                                         wafer_section=(wafer_num, sec_num)))

    logger.report_event("Initializing aligner", log_level=logging.INFO)
    aligner = StackAligner(conf)
    logger.report_event("Aligning sections", log_level=logging.INFO)
    aligner.align_sections(
        sections)  # will align and update the section tiles' transformations
    del aligner
    logger.end_process('main ending', rh_logger.ExitCode(0))
def run_stitcher(args):
    """Stitch every ``*.json`` tilespec in ``args.ts_dir``.

    Outputs go to ``args.output_dir`` under the same basename; tilespecs
    whose output already exists are skipped, making the run resumable.
    """
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    conf = (Stitcher.load_conf_from_file(args.conf_fname)
            if args.conf_fname is not None else None)
    stitcher = Stitcher(conf)

    # read the input tilespecs
    in_ts_fnames = sorted(glob.glob(os.path.join(args.ts_dir, "*.json")))
    logger.report_event("Stitching {} sections".format(len(in_ts_fnames)),
                        log_level=logging.INFO)
    for in_ts_fname in in_ts_fnames:
        logger.report_event("Stitching {}".format(in_ts_fname),
                            log_level=logging.DEBUG)
        out_ts_fname = os.path.join(args.output_dir,
                                    os.path.basename(in_ts_fname))
        if os.path.exists(out_ts_fname):
            # Already stitched on a previous run — skip.
            continue
        print("Stitching {}".format(in_ts_fname))
        with open(in_ts_fname, 'rt') as in_f:
            loaded_ts = ujson.load(in_f)
        cur_section = Section.create_from_tilespec(loaded_ts)
        # Stitching updates the section's tile transformations in place.
        stitcher.stitch_section(cur_section)
        cur_section.save_as_json(out_ts_fname)
    del stitcher
def _load_tilespec(fname):
    """Read a tilespec JSON file and build a Section from it."""
    with open(fname, 'rb') as tilespec_file:
        loaded_spec = json.load(tilespec_file)
    return Section.create_from_tilespec(loaded_spec)
# # Save the transforms to file # import json # print('Writing output to: {}'.format(out_fname)) # section.save_as_json(out_fname) # # img_fnames, imgs = StackAligner.read_imgs(imgs_dir) # # for img_fname, img, transform in zip(img_fnames, imgs, transforms): # # # assumption: the output image shape will be the same as the input image # # out_fname = os.path.join(out_path, os.path.basename(img_fname)) # # img_transformed = cv2.warpAffine(img, transform[:2,:], (img.shape[1], img.shape[0]), flags=cv2.INTER_AREA) # # cv2.imwrite(out_fname, img_transformed) # Testing # test_detector('/n/home10/adisuis/Harvard/git/rh_aligner/tests/ECS_test9_cropped/images/010_S10R1', conf_fname, 8, 500) logger.start_process('main', 'stitcher.py', [section_dir, conf_fname]) section = Section.create_from_full_image_coordinates( section_dir, section_num) conf = Stitcher.load_conf_from_file(conf_fname) stitcher = Stitcher(conf) stitcher.stitch_section( section) # will stitch and update the section tiles' transformations # output the section out_tilespec = section.tilespec import json with open(out_fname, 'wt') as out_f: json.dump(out_tilespec, out_f, sort_keys=True, indent=4) del stitcher logger.end_process('main ending', rh_logger.ExitCode(0))
def create_tilespecs(args):
    """Create montaged tilespec JSONs for every section found in a wafer folder.

    Parses the workflows folder into a section-number -> coordinates map,
    warns about missing/non-contiguous section numbers, optionally restricts
    each section to the mfovs listed in ``args.filtered_mfovs_pkl``, and
    writes one ``W<wafer>_Sec<sec>_montaged.json`` per section into
    ``args.output_dir`` (skipping files that already exist).
    """
    # parse the workflows directory
    sections_map = common.parse_workflows_folder(args.wafer_folder)
    logger.report_event("Finished parsing sections", log_level=logging.INFO)

    sorted_sec_keys = sorted(list(sections_map.keys()))
    if min(sorted_sec_keys) != 1:
        logger.report_event("Minimal section # found: {}".format(
            min(sorted_sec_keys)),
                            log_level=logging.WARN)
    logger.report_event("Found {} sections in {}".format(
        len(sections_map), args.wafer_folder),
                        log_level=logging.INFO)
    # If the count doesn't match the max section number, some are missing.
    if len(sorted_sec_keys) != max(sorted_sec_keys):
        logger.report_event(
            "There are {} sections, but maximal section # found: {}".format(
                len(sections_map), max(sorted_sec_keys)),
            log_level=logging.WARN)
        missing_sections = [
            i for i in range(1, max(sorted_sec_keys)) if i not in sections_map
        ]
        logger.report_event("Missing sections: {}".format(missing_sections),
                            log_level=logging.WARN)

    # if there's a filtered mfovs file, parse it
    filtered_mfovs_map = None
    if args.filtered_mfovs_pkl is not None:
        logger.report_event("Filtering sections mfovs", log_level=logging.INFO)
        filtered_mfovs_map = parse_filtered_mfovs(args.filtered_mfovs_pkl)

    logger.report_event("Outputing sections to tilespecs directory: {}".format(
        args.output_dir),
                        log_level=logging.INFO)
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    for sec_num in sorted_sec_keys:
        # extract wafer and section# from directory name
        # NOTE(review): the assignment below rebinds the loop variable
        # `sec_num` to the number parsed from the directory name; later
        # `sections_map[sec_num]` lookups rely on that number matching the
        # original map key — confirm they always agree.
        if isinstance(sections_map[sec_num], list):
            wafer_num, sec_num = sec_dir_to_wafer_section(
                os.path.dirname(sections_map[sec_num][0]), args.wafer_num)
        else:
            wafer_num, sec_num = sec_dir_to_wafer_section(
                sections_map[sec_num], args.wafer_num)
        out_ts_fname = os.path.join(
            args.output_dir, 'W{}_Sec{}_montaged.json'.format(
                str(wafer_num).zfill(2), str(sec_num).zfill(3)))
        if os.path.exists(out_ts_fname):
            # Resumable: skip sections whose tilespec was already written.
            logger.report_event("Already found tilespec: {}, skipping".format(
                os.path.basename(out_ts_fname)),
                                log_level=logging.INFO)
            continue

        sec_relevant_mfovs = None
        if filtered_mfovs_map is not None:
            if (wafer_num, sec_num) not in filtered_mfovs_map:
                logger.report_event(
                    "WARNING: cannot find filtered data for (wafer, sec): {}, skipping"
                    .format((wafer_num, sec_num)),
                    log_level=logging.INFO)
                continue
            sec_relevant_mfovs = filtered_mfovs_map[wafer_num, sec_num]

        layer_num = get_layer_num(sec_num, args.initial_layer_num)
        if isinstance(sections_map[sec_num], list):
            # TODO - not implemented yet
            section = Section.create_from_mfovs_image_coordinates(
                sections_map[sec_num],
                layer_num,
                relevant_mfovs=sec_relevant_mfovs)
        else:
            section = Section.create_from_full_image_coordinates(
                sections_map[sec_num],
                layer_num,
                relevant_mfovs=sec_relevant_mfovs)
        section.save_as_json(out_ts_fname)
import ujson import os if len(sys.argv) > 1: ts_fname = sys.argv[1] else: ts_fname = '/n/boslfs/LABS/lichtman_lab/adisuis/alignments/Zebrafish_Mariela_HindBrainROI/2d_W19_single_tiles_HBROI_gpu_opt_output_dir/W19_Sec016_montaged.json' #img_fname = '/n/lichtmangpfs01/Instrument_drop/U19_Zebrafish/EM/w019/w019_h02_20190326_00-43-52/002_S16R1/000021/002_000021_040_2019-03-26T0046443382649.bmp' with open(ts_fname, 'rt') as in_f: tilespec = ujson.load(in_f) wafer_num = int(os.path.basename(ts_fname).split('_')[0].split('W')[1]) sec_num = int( os.path.basename(ts_fname).split('.')[0].split('_')[1].split('Sec')[1]) section = Section.create_from_tilespec(tilespec, wafer_section=(wafer_num, sec_num)) mesh_spacing = 500 refined_mesh_spacing = 50 mesh_refiner = SectionMeshRefiner(section, mesh_spacing, refined_mesh_spacing) mesh_tri = Delaunay(mesh_refiner.get_refined_mesh_points()) #edges_filter = MeshEdgesFilter(mesh_tri) #filtered_edges_indices, filtered_simplices = edges_filter.filter_by_wrinkles(contours) filtered_simplices = mesh_tri.simplices assert (len(section.tilespec) == 1) img = None if DEBUG_NO_TRANSFOM:
'/n/home10/adisuis/Harvard/git_lichtmangpu01/mb_aligner/scripts/ECS_test9_cropped_011_S11R1.json', '/n/home10/adisuis/Harvard/git_lichtmangpu01/mb_aligner/scripts/ECS_test9_cropped_012_S12R1.json', '/n/home10/adisuis/Harvard/git_lichtmangpu01/mb_aligner/scripts/ECS_test9_cropped_013_S13R1.json', '/n/home10/adisuis/Harvard/git_lichtmangpu01/mb_aligner/scripts/ECS_test9_cropped_014_S14R1.json' ] out_folder = './output_aligned_ECS_test9_cropped' conf_fname = '../../conf/conf_example.yaml' logger.start_process('main', 'aligner.py', [secs_ts_fnames, conf_fname]) conf = StackAligner.load_conf_from_file(conf_fname) logger.report_event("Loading sections", log_level=logging.INFO) sections = [] # TODO - Should be done in a parallel fashion for ts_fname in secs_ts_fnames: with open(ts_fname, 'rt') as in_f: tilespec = ujson.load(in_f) wafer_num = 1 sec_num = int(os.path.basename(ts_fname).split('_')[-1].split('S')[1].split('R')[0]) sections.append(Section.create_from_tilespec(tilespec, wafer_section=(wafer_num, sec_num))) logger.report_event("Initializing aligner", log_level=logging.INFO) aligner = StackAligner(conf) logger.report_event("Aligning sections", log_level=logging.INFO) aligner.align_sections(sections) # will align and update the section tiles' transformations del aligner logger.end_process('main ending', rh_logger.ExitCode(0))