def match_all(data_dir):
    """For all conditions, match all ground-truth cells to measured cells."""
    print('Loading GT')
    gt_cells = load(
        os.path.join(data_dir, 'cell_obj', 'cells_final_selected.hdf5'))
    storm_i = np.load(os.path.join(data_dir, 'images', 'storm_inner.npy'))

    for ph in [10000, 1000, 500]:
        print('Photons {}'.format(ph))
        m_cells = load(
            os.path.join(data_dir, 'cell_obj',
                         'cell_ph_{}_raw.hdf5'.format(ph)))
        print('Measured cells loaded')

        bin_predicted = tifffile.imread(
            os.path.join(data_dir, 'images',
                         'binary_{}photons_predicted.tif'.format(ph)))

        print('Filtering')
        filtered_pred = filter_binaries(bin_predicted,
                                        min_size=495, max_size=2006.4,
                                        min_minor=7.57, max_minor=17.3,
                                        min_major=15.41, max_major=54.97)

        gt_match, m_match = match_cells(gt_cells, m_cells, storm_i,
                                        filtered_pred, max_d=5)
        print('Matched {} cells out of max {}'.format(len(m_match),
                                                      len(m_cells)))

        # Sanity check: matched pairs must have the same number of
        # inner-membrane STORM localizations.
        for i, (m_, gt_) in tqdm(enumerate(zip(m_match, gt_match))):
            m_i = m_cells.name.tolist().index(m_.rstrip())
            g_i = gt_cells.name.tolist().index(gt_.rstrip())
            try:
                assert len(m_cells[m_i].data.data_dict['storm_inner']) == len(
                    gt_cells[g_i].data.data_dict['storm_inner'])
            except AssertionError:
                print('Assertion error:', i)

        with open(
                os.path.join(data_dir, 'matched_names',
                             'gt_cells_ph_{}_match.txt'.format(ph)),
                'w') as f:
            f.writelines(gt_match)

        with open(
                os.path.join(data_dir, 'matched_names',
                             'm_cells_ph_{}_match.txt'.format(ph)),
                'w') as f:
            f.writelines(m_match)


def filter_all(data_dir):
    """Remove STORM localizations from neighbouring cells and discard cell
    objects with too few localizations, for all conditions."""
    gt_cells = load(
        os.path.join(data_dir, 'cell_obj', 'cells_final_selected.hdf5'))

    for ph in [10000, 1000, 500]:
        print('Photons', ph)
        with open(
                os.path.join(data_dir, 'matched_names',
                             'm_cells_ph_{}_match.txt'.format(ph)), 'r') as f:
            m_names = [n.rstrip() for n in f.readlines()]

        m_cells = load(
            os.path.join(data_dir, 'cell_obj',
                         'cell_ph_{}_raw.hdf5'.format(ph)))

        with open(
                os.path.join(data_dir, 'matched_names',
                             'gt_cells_ph_{}_match.txt'.format(ph)), 'r') as f:
            gt_names = [n.rstrip() for n in f.readlines()]

        m_final, gt_final, m_cells, gt_cells = filter_cells(
            m_names, gt_names, m_cells, gt_cells)

        with open(
                os.path.join(data_dir, 'matched_names',
                             'gt_cells_ph_{}_match_filter.txt'.format(ph)),
                'w') as f:
            f.writelines(gt_final)

        with open(
                os.path.join(data_dir, 'matched_names',
                             'm_cells_ph_{}_match_filter.txt'.format(ph)),
                'w') as f:
            f.writelines(m_final)

        # Sanity check: matched pairs must still have equal numbers of
        # inner-membrane STORM localizations after filtering.
        for i, (m_, gt_) in tqdm(enumerate(zip(m_final, gt_final))):
            m_i = m_cells.name.tolist().index(m_.rstrip())
            g_i = gt_cells.name.tolist().index(gt_.rstrip())
            try:
                assert len(m_cells[m_i].data.data_dict['storm_inner']) == len(
                    gt_cells[g_i].data.data_dict['storm_inner'])
            except AssertionError:
                print('Assertion error:', i)

        save(
            os.path.join(data_dir, 'cell_obj',
                         'cell_ph_{}_filtered.hdf5'.format(ph)), m_cells)


def gen_im(data_dir):
    """Generate microscopy images from a list of cell objects by placing them
    randomly oriented in the image."""
    cell_list = load(
        os.path.join(data_dir, 'cell_obj', 'cells_final_selected.hdf5'))

    out_dict = generate_images(cell_list, 1000, 10, 3, (512, 512))

    if not os.path.exists(os.path.join(data_dir, 'images')):
        os.mkdir(os.path.join(data_dir, 'images'))

    np.save(os.path.join(data_dir, 'images', 'binary.npy'),
            out_dict['binary'])
    np.save(os.path.join(data_dir, 'images', 'brightfield.npy'),
            out_dict['brightfield'])
    np.save(os.path.join(data_dir, 'images', 'foci_inner.npy'),
            out_dict['foci_inner'])
    np.save(os.path.join(data_dir, 'images', 'foci_outer.npy'),
            out_dict['foci_outer'])
    np.save(os.path.join(data_dir, 'images', 'storm_inner.npy'),
            out_dict['storm_inner'])
    np.save(os.path.join(data_dir, 'images', 'storm_outer.npy'),
            out_dict['storm_outer'])

    tifffile.imsave(os.path.join(data_dir, 'images', 'binary.tif'),
                    out_dict['binary'])
    tifffile.imsave(os.path.join(data_dir, 'images', 'brightfield.tif'),
                    out_dict['brightfield'])
    tifffile.imsave(os.path.join(data_dir, 'images', 'foci_inner.tif'),
                    out_dict['foci_inner'])
    tifffile.imsave(os.path.join(data_dir, 'images', 'foci_outer.tif'),
                    out_dict['foci_outer'])

    np.savetxt(os.path.join(data_dir, 'images', 'storm_inner.txt'),
               out_dict['storm_inner'])
    np.savetxt(os.path.join(data_dir, 'images', 'storm_outer.txt'),
               out_dict['storm_outer'])


def get_obj_values_all(data_dir):
    """Evaluate ``get_value`` on all matched ground-truth/measured cell pairs
    for each photon count and optimization condition, and save the results."""
    gt_cells = load(
        os.path.join(data_dir, 'cell_obj', 'cells_final_selected.hdf5'))

    for ph in [10000, 1000, 500]:
        print('Photons', ph)
        m_names = np.genfromtxt(os.path.join(
            data_dir, 'matched_names',
            'm_cells_ph_{}_match_filter.txt'.format(ph)), dtype=str)
        gt_names = np.genfromtxt(os.path.join(
            data_dir, 'matched_names',
            'gt_cells_ph_{}_match_filter.txt'.format(ph)), dtype=str)

        for condition in ['binary', 'brightfield', 'storm_inner']:
            print('Condition', condition)
            m_cells = load(
                os.path.join(
                    data_dir, 'cell_obj',
                    'm_cells_ph_{}_filtered_{}.hdf5'.format(ph, condition)))

            # Get index arrays to sort saved cell lists by matched names.
            m_index = np.searchsorted(m_cells.name, m_names)
            gt_index = np.searchsorted(gt_cells.name, gt_names)

            # Sort the CellList objects by indexing.
            m_sorted = m_cells[m_index]
            gt_sorted = gt_cells[gt_index]

            result = np.array([
                get_value(gt, m)
                for m, gt in tqdm(zip(m_sorted, gt_sorted),
                                  total=len(m_sorted))
            ])

            np.savetxt(
                os.path.join(
                    data_dir, 'obj_values',
                    'obj_vals_storm_ph_{}_{}.txt'.format(ph, condition)),
                result)
            np.save(
                os.path.join(
                    data_dir, 'obj_values',
                    'obj_vals_storm_ph_{}_{}.npy'.format(ph, condition)),
                result)


def optimize_all(data_dir):
    """Optimize each cell's coordinate system, for each condition, based on
    different data elements."""
    for ph in [10000, 1000, 500]:
        print('Photons {}'.format(ph))
        m_cells = load(
            os.path.join(data_dir, 'cell_obj',
                         'cell_ph_{}_filtered.hdf5'.format(ph)))
        print('Measured cells loaded')

        print('binary')
        optimize_cells = m_cells.copy()
        res = optimize_cells.optimize_mp()
        obj_vals = [r.objective_value for r in res]
        np.savetxt(
            os.path.join(data_dir, 'minimize_res',
                         'm_cells_ph_{}_binary.txt'.format(ph)), obj_vals)
        save(
            os.path.join(data_dir, 'cell_obj',
                         'm_cells_ph_{}_filtered_binary.hdf5'.format(ph)),
            optimize_cells)

        print('brightfield')
        optimize_cells = m_cells.copy()
        res = optimize_cells.optimize_mp('brightfield')
        obj_vals = [r.objective_value for r in res]
        np.savetxt(
            os.path.join(data_dir, 'minimize_res',
                         'm_cells_ph_{}_brightfield.txt'.format(ph)),
            obj_vals)
        save(
            os.path.join(
                data_dir, 'cell_obj',
                'm_cells_ph_{}_filtered_brightfield.hdf5'.format(ph)),
            optimize_cells)

        print('storm inner')
        optimize_cells = m_cells.copy()
        res = optimize_cells.optimize_mp('storm_inner')
        obj_vals = [r.objective_value for r in res]
        np.savetxt(
            os.path.join(data_dir, 'minimize_res',
                         'm_cells_ph_{}_storm.txt'.format(ph)), obj_vals)
        save(
            os.path.join(
                data_dir, 'cell_obj',
                'm_cells_ph_{}_filtered_storm_inner.hdf5'.format(ph)),
            optimize_cells)
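

# --- Hypothetical helper (not part of the original pipeline): a minimal sketch
# showing how the per-cell objective values written by optimize_all() could be
# loaded back and summarized. The function name and the summary statistic are
# assumptions; only the file locations come from optimize_all() above.
def summarize_objective_values(data_dir):
    for ph in [10000, 1000, 500]:
        for condition in ['binary', 'brightfield', 'storm']:
            path = os.path.join(data_dir, 'minimize_res',
                                'm_cells_ph_{}_{}.txt'.format(ph, condition))
            obj_vals = np.loadtxt(path)
            print('{} photons, {}: {} cells, median objective value {:.3g}'.format(
                ph, condition, len(obj_vals), np.median(obj_vals)))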


def get_r_vals_all(data_dir):
    """Collect radial distances of inner and outer STORM localizations for all
    matched ground-truth and measured cell pairs, per photon count and
    optimization condition."""
    gt_cells = load(
        os.path.join(data_dir, 'cell_obj', 'cells_final_selected.hdf5'))

    for ph in [10000, 1000, 500]:
        print('Photons', ph)
        m_names = np.genfromtxt(os.path.join(
            data_dir, 'matched_names',
            'm_cells_ph_{}_match_filter.txt'.format(ph)), dtype=str)
        gt_names = np.genfromtxt(os.path.join(
            data_dir, 'matched_names',
            'gt_cells_ph_{}_match_filter.txt'.format(ph)), dtype=str)

        for condition in ['binary', 'brightfield', 'storm_inner']:
            print('Condition', condition)
            m_cells = load(
                os.path.join(
                    data_dir, 'cell_obj',
                    'm_cells_ph_{}_filtered_{}.hdf5'.format(ph, condition)))

            # Get index arrays to sort saved cell lists by matched names.
            m_index = np.searchsorted(m_cells.name, m_names)
            gt_index = np.searchsorted(gt_cells.name, gt_names)

            # Sort the CellList objects by indexing; no copying is done.
            m_sorted = m_cells[m_index]
            gt_sorted = gt_cells[gt_index]

            result = np.array(
                [process_cells(m, gt) for m, gt in zip(m_sorted, gt_sorted)])

            # Maximum number of localizations per cell is < 720.
            out_arr = np.full((len(m_sorted), 4, 720), fill_value=np.nan)
            # Per cell: m_inner, m_outer, gt_inner, gt_outer.
            for (r0, r1, r2, r3), elem in zip(result, out_arr):
                elem[0][:len(r0)] = r0
                elem[1][:len(r1)] = r1
                elem[2][:len(r2)] = r2
                elem[3][:len(r3)] = r3

            np.savetxt(
                os.path.join(data_dir, 'r_values',
                             'r_inner_m_ph_{}_{}.txt'.format(ph, condition)),
                out_arr[:, 0, :])
            np.savetxt(
                os.path.join(data_dir, 'r_values',
                             'r_outer_m_ph_{}_{}.txt'.format(ph, condition)),
                out_arr[:, 1, :])
            np.savetxt(
                os.path.join(data_dir, 'r_values',
                             'r_inner_gt_ph_{}_{}.txt'.format(ph, condition)),
                out_arr[:, 2, :])
            np.savetxt(
                os.path.join(data_dir, 'r_values',
                             'r_outer_gt_ph_{}_{}.txt'.format(ph, condition)),
                out_arr[:, 3, :])

            np.save(
                os.path.join(data_dir, 'r_values',
                             'r_inner_m_ph_{}_{}.npy'.format(ph, condition)),
                out_arr[:, 0, :])
            np.save(
                os.path.join(data_dir, 'r_values',
                             'r_outer_m_ph_{}_{}.npy'.format(ph, condition)),
                out_arr[:, 1, :])
            np.save(
                os.path.join(data_dir, 'r_values',
                             'r_inner_gt_ph_{}_{}.npy'.format(ph, condition)),
                out_arr[:, 2, :])
            np.save(
                os.path.join(data_dir, 'r_values',
                             'r_outer_gt_ph_{}_{}.npy'.format(ph, condition)),
                out_arr[:, 3, :])
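

# --- Hypothetical driver (a sketch, not part of the original scripts): one
# possible order in which the steps above could be run for a given data
# directory. It assumes data_dir already contains the raw measured cells
# ('cell_ph_{}_raw.hdf5') and the predicted binaries
# ('binary_{}photons_predicted.tif') produced by segmentation steps that are
# not shown here.
def run_pipeline(data_dir):
    gen_im(data_dir)              # simulate ground-truth images and STORM tables
    match_all(data_dir)           # match measured cells to ground-truth cells
    filter_all(data_dir)          # filter matched cells and their localizations
    optimize_all(data_dir)        # fit coordinate systems on each data element
    get_obj_values_all(data_dir)  # evaluate get_value on all matched pairs
    get_r_vals_all(data_dir)      # collect r values for all matched pairs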


    # Clip infinities and drop NaN entries before returning.
    arr[np.isinf(arr)] = 10
    return arr[~np.isnan(arr)]


if reload:
    for ph in photons:
        im = np.load(
            os.path.join(data_dir, 'images',
                         'bf_noise_{}_photons.npy'.format(ph)))
        np.save(
            os.path.join(data_dir, 'plot_vars',
                         'bf_noise_{}_photons.npy'.format(ph)), im[0])

        for condition in conditions:
            cells = load(
                os.path.join(
                    data_dir, 'cell_obj',
                    'm_cells_ph_{}_filtered_{}.hdf5'.format(ph, condition)))
            save(
                os.path.join(
                    data_dir, 'plot_vars',
                    'cells_{}_{}_photons.hdf5'.format(condition, ph)),
                cells[:50])

cell_dict = {}
for ph in photons:
    cell_dict[ph] = {}
    for condition in conditions:
        cell_dict[ph][condition] = load(
            os.path.join(data_dir, 'plot_vars',
                         'cells_{}_{}_photons.hdf5'.format(condition, ph)))

imgs = {


import matplotlib.pyplot as plt
import numpy as np
from colicoords import load, CellPlot
from colicoords.support import running_sum
import matplotlib.gridspec as gridspec
from pycorrelate import ucorrelate
from scipy.ndimage.filters import uniform_filter1d
import os
import seaborn as sns

upscale = 15  # pixel upscale factor for the SMLM reconstruction

# 20181204_lacy_sr\20181204_cell_10.hdf5
cell_storm = load('storm_cell.hdf5')

fig_width = 8.53534 / 2.54
fig = plt.figure(figsize=(fig_width, 6.739342011413036))

frac_bot = 0.61

# Limits (in pixels) and width/height of the zoomed region.
zx_min = 10
zx_max = 20
w_r = zx_max - zx_min

zy_max = 23
zy_min = 11
h_r = zy_max - zy_min

shape_l = cell_storm.data.binary_img.shape
rl = shape_l[1] / shape_l[0]  # aspect ratio (width / height) of the full cell image
rr = w_r / h_r  # aspect ratio of the zoomed region


import matplotlib.pyplot as plt
from colicoords import Cell, load, save, CellPlot
import os

cell = load(r'img191c002.hdf5')
data = cell.data.copy()
cell_raw = Cell(data[:, 1:-1])

reload = False
if reload:
    cell_bin = cell_raw.copy()
    cell_bin.optimize()
    save('cell_bin.hdf5', cell_bin)

    cell_bf = cell_raw.copy()
    cell_bf.optimize('brightfield')
    cell_bf.measure_r()
    save('cell_bf.hdf5', cell_bf)

    cell_flu = cell_raw.copy()
    cell_flu.optimize('gain50')
    cell_flu.measure_r()
    save('cell_flu.hdf5', cell_flu)
else:
    cell_bin = load('cell_bin.hdf5')
    cell_bf = load('cell_bf.hdf5')
    cell_flu = load('cell_flu.hdf5')

fig_width = 8.53534 / 2.54
fig, axes = plt.subplots(3, 3, figsize=(fig_width, fig_width))


import matplotlib.pyplot as plt
import numpy as np
from colicoords import Cell, load, CellPlot
from matplotlib.gridspec import GridSpec
from matplotlib.patches import Arc
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.image import AxesImage
import os


def calc_dx_dy(cell, x):
    # Normalize the vector (d_xr, 1), where d_xr is the slope of the cell
    # midline at position x.
    d_xr = cell.coords.p_dx(x)
    dy = 1 / np.sqrt(d_xr**2 + 1)
    dx = d_xr / np.sqrt(d_xr**2 + 1)
    return dx, dy


cell = load(r'../../data/lacy_selected_cell_3.hdf5')
data = cell.data.copy()

c = Cell(data)
c.optimize('brightfield')
reconstructed_bf = c.reconstruct_image('brightfield', step=0.5)
c.data.add_data(reconstructed_bf, 'brightfield', 'sim_bf')
cp = CellPlot(c)

fig_width = 8.53534 / 2.54
fig = plt.figure(figsize=(fig_width, 5.5))

gs0 = GridSpec(1, 1)
ax0 = fig.add_subplot(gs0[0])

cmap = plt.cm.gray