def apply_image_processing(config, ion_datacube):
    """Apply the pre-configured image processing methods to an ion datacube.

    #todo: expose parameters in config

    :param config: dict with an ``image_generation`` section providing
        ``q`` (hot-spot removal percentile; skipped when <= 0),
        ``smooth`` (name of a function in ``pyImagingMSpec.smoothing``,
        or empty/None to skip smoothing) and ``smooth_params`` (kwargs
        passed to that smoothing function).
    :param ion_datacube: object from pyImagingMSpec.ion_datacube already
        containing images.
    :return: None; ion_datacube is updated in place.
    """
    from pyImagingMSpec import smoothing

    # todo: hot_spot_removal shouldn't be separately coded - should be within
    # smooth_methods of config and iterated over; every method in smoothing
    # should accept (im, **args)
    q = config['image_generation']['q']
    if q > 0:
        for xic in ion_datacube.xic:
            smoothing.hot_spot_removal(xic, q)  # updated in place

    smooth_method = config['image_generation']['smooth']
    smooth_params = config['image_generation']['smooth_params']
    # Truthiness guard instead of `not smooth_method == ''`: also skips None,
    # which would otherwise crash in getattr() below.
    if smooth_method:
        for ii in range(ion_datacube.n_im):
            im = ion_datacube.xic_to_image(ii)
            # todo: for method in smoothing_methods:
            method_to_call = getattr(smoothing, smooth_method)
            im_s = method_to_call(im, **smooth_params)
            ion_datacube.xic[ii] = ion_datacube.image_to_xic(im_s)
    return None
def compute(iso_images_sparse, sf_ints):
    """Score a set of isotopic ion images and return the metric tuple.

    Relies on names from the enclosing scope (``empty_matrix``,
    ``sample_area_mask``, ``img_gen_conf``, ``metrics``, ``ImgMetrics``,
    ``smoothing`` and the isotope measure functions) — TODO confirm they
    are bound by the surrounding factory/closure.

    :param iso_images_sparse: sparse isotope images (entries may be None);
        padded with empty images up to ``len(sf_ints)``.
    :param sf_ints: theoretical isotope-peak intensities.
    :return: metric values as a tuple (``ImgMetrics.to_tuple``).
    """
    np.seterr(invalid='ignore')  # to ignore division by zero warnings
    n_missing = len(sf_ints) - len(iso_images_sparse)
    dense_imgs = [
        empty_matrix if sparse is None else sparse.toarray()
        for sparse in iso_images_sparse + [None] * n_missing
    ]
    # Flatten each image and keep only pixels inside the sample area.
    flat_imgs = []
    for dense in dense_imgs:
        flat_imgs.append(dense.flat[:][sample_area_mask])
    if img_gen_conf['do_preprocessing']:
        for flat in flat_imgs:
            smoothing.hot_spot_removal(flat)
    img_metrics = ImgMetrics(metrics)
    if len(dense_imgs) > 0:
        img_metrics.map['spectral'] = isotope_pattern_match(flat_imgs, sf_ints)
        img_metrics.map['spatial'] = isotope_image_correlation(
            flat_imgs, weights=sf_ints[1:])
        chaos = measure_of_chaos(dense_imgs[0], img_gen_conf['nlevels'])
        # A chaos of exactly 1.0 is treated as "no structure" and zeroed out.
        img_metrics.map['chaos'] = 0 if np.isclose(chaos, 1.0) else chaos
    img_metrics.map['total_iso_ints'] = [dense.sum() for dense in dense_imgs]
    img_metrics.map['min_iso_ints'] = [dense.min() for dense in dense_imgs]
    img_metrics.map['max_iso_ints'] = [dense.max() for dense in dense_imgs]
    return img_metrics.to_tuple()
def compute(iso_images_sparse, sf_ints):
    """Score isotopic ion images and return the extended measure tuple.

    Relies on names from the enclosing scope (``empty_matrix``,
    ``sample_area_mask``, ``img_gen_conf``, ``ImgMeasures``, ``smoothing``
    and the various measure functions) — TODO confirm they are bound by
    the surrounding closure.

    :param iso_images_sparse: sparse isotope images (entries may be None);
        padded with empty images up to ``len(sf_ints)``.
    :param sf_ints: theoretical isotope-peak intensities.
    :return: measure values as a tuple (``ImgMeasures.to_tuple``).
    """
    n_pad = len(sf_ints) - len(iso_images_sparse)
    dense_imgs = []
    for sparse in iso_images_sparse + [None] * n_pad:
        dense_imgs.append(empty_matrix if sparse is None else sparse.toarray())
    # Flatten each image and keep only pixels inside the sample area.
    flat_imgs = [dense.flat[:][sample_area_mask] for dense in dense_imgs]
    if img_gen_conf['do_preprocessing']:
        for flat in flat_imgs:
            smoothing.hot_spot_removal(flat)
    # All measure fields start zero-initialised.
    measures = ImgMeasures(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
    if dense_imgs:
        measures.pattern_match = isotope_pattern_match(flat_imgs, sf_ints)
        measures.image_corr = isotope_image_correlation(
            flat_imgs, weights=sf_ints[1:])
        moc = measure_of_chaos(dense_imgs[0], img_gen_conf['nlevels'])
        # A chaos of exactly 1.0 is treated as "no structure" and zeroed out.
        measures.chaos = moc if not np.isclose(moc, 1.0) else 0
        (measures.image_corr_01, measures.image_corr_02,
         measures.image_corr_03, measures.image_corr_12,
         measures.image_corr_13,
         measures.image_corr_23) = isotope_image_correlation_sd(flat_imgs)
        measures.snr = snr_img(dense_imgs[0])
        measures.percent_0s = percent_zero(dense_imgs[0])
        (measures.peak_int_diff_0, measures.peak_int_diff_1,
         measures.peak_int_diff_2,
         measures.peak_int_diff_3) = spectra_int_diff(flat_imgs, sf_ints)
        (measures.quart_1, measures.quart_2,
         measures.quart_3) = quartile_pxl(dense_imgs[0])
        (measures.ratio_peak_01, measures.ratio_peak_02,
         measures.ratio_peak_03, measures.ratio_peak_12,
         measures.ratio_peak_13,
         measures.ratio_peak_23) = ratio_peaks(flat_imgs)
        (measures.percentile_10, measures.percentile_20,
         measures.percentile_30, measures.percentile_40,
         measures.percentile_50, measures.percentile_60,
         measures.percentile_70, measures.percentile_80,
         measures.percentile_90) = decile_pxl(dense_imgs[0])
    return measures.to_tuple()
def compute(iso_images_sparse, sf_ints):
    """Score isotopic ion images (chaos / spatial / spectral) as a tuple.

    Relies on names from the enclosing scope (``empty_matrix``,
    ``sample_area_mask``, ``img_gen_conf``, ``ImgMeasures``, ``smoothing``
    and the measure functions) — TODO confirm they are bound by the
    surrounding closure.

    :param iso_images_sparse: sparse isotope images (entries may be None);
        padded with empty images up to ``len(sf_ints)``.
    :param sf_ints: theoretical isotope-peak intensities.
    :return: measure values as a tuple (``ImgMeasures.to_tuple``).
    """
    pad = [None] * (len(sf_ints) - len(iso_images_sparse))
    dense_imgs = [
        img.toarray() if img is not None else empty_matrix
        for img in iso_images_sparse + pad
    ]
    # Flatten each image and keep only pixels inside the sample area.
    flat_imgs = [dense.flat[:][sample_area_mask] for dense in dense_imgs]
    if img_gen_conf['do_preprocessing']:
        for flat in flat_imgs:
            smoothing.hot_spot_removal(flat)
    measures = ImgMeasures(0, 0, 0)
    if dense_imgs:
        measures.pattern_match = isotope_pattern_match(flat_imgs, sf_ints)
        measures.image_corr = isotope_image_correlation(
            flat_imgs, weights=sf_ints[1:])
        chaos = measure_of_chaos(dense_imgs[0], img_gen_conf['nlevels'])
        # A chaos of exactly 1.0 is treated as "no structure" and zeroed out.
        measures.chaos = chaos if not np.isclose(chaos, 1.0) else 0
    return measures.to_tuple()
def compute(iso_images_sparse, sf_ints):
    """Compute the configured image metrics for a set of isotope images.

    Relies on names from the enclosing scope (``empty_matrix``,
    ``sample_area_mask``, ``img_gen_conf``, ``metrics``, ``ImgMetrics``,
    ``smoothing`` and the measure functions) — TODO confirm they are
    bound by the surrounding closure.

    :param iso_images_sparse: sparse isotope images (entries may be None);
        padded with empty images up to ``len(sf_ints)``.
    :param sf_ints: theoretical isotope-peak intensities.
    :return: metric values as a tuple (``ImgMetrics.to_tuple``).
    """
    np.seterr(invalid='ignore')  # to ignore division by zero warnings

    def _densify(img):
        # None placeholders become the shared all-zero matrix.
        return empty_matrix if img is None else img.toarray()

    n_expected = len(sf_ints)
    padded = iso_images_sparse + [None] * (n_expected - len(iso_images_sparse))
    dense = [_densify(img) for img in padded]
    # Flatten each image and keep only pixels inside the sample area.
    flattened = [arr.flat[:][sample_area_mask] for arr in dense]
    if img_gen_conf['do_preprocessing']:
        for vec in flattened:
            smoothing.hot_spot_removal(vec)
    metric_obj = ImgMetrics(metrics)
    scores = metric_obj.map
    if dense:
        scores['spectral'] = isotope_pattern_match(flattened, sf_ints)
        scores['spatial'] = isotope_image_correlation(
            flattened, weights=sf_ints[1:])
        chaos_raw = measure_of_chaos(dense[0], img_gen_conf['nlevels'])
        # A chaos of exactly 1.0 is treated as "no structure" and zeroed out.
        scores['chaos'] = 0 if np.isclose(chaos_raw, 1.0) else chaos_raw
    scores['total_iso_ints'] = [arr.sum() for arr in dense]
    scores['min_iso_ints'] = [arr.min() for arr in dense]
    scores['max_iso_ints'] = [arr.max() for arr in dense]
    return metric_obj.to_tuple()
def compute(iso_images_sparse, formula_ints):
    """Compute MSM component metrics for a formula's isotope images.

    Each gating step (spectral -> spatial -> chaos -> msm) only runs when
    the previous score is positive, short-circuiting hopeless candidates.

    Relies on names from the enclosing scope (``METRICS``,
    ``empty_matrix``, ``sample_area_mask_flat``, ``img_gen_config``,
    ``smoothing``, ``replace_nan`` and the measure functions) — TODO
    confirm they are bound by the surrounding closure.

    :param iso_images_sparse: sparse isotope images (entries may be None).
    :param formula_ints: theoretical isotope-peak intensities.
    :return: OrderedDict of metric name -> NaN-sanitised value.
    """
    np.seterr(invalid='ignore')  # to ignore division by zero warnings
    scores = METRICS.copy()
    if iso_images_sparse:
        dense_imgs = []
        for sparse_img in iso_images_sparse:
            dense_imgs.append(
                empty_matrix if sparse_img is None else sparse_img.toarray())
        # Flatten, mask to the sample area, and trim to the number of
        # theoretical peaks.
        flat_imgs = [
            d.flatten()[sample_area_mask_flat] for d in dense_imgs
        ][:len(formula_ints)]
        if img_gen_config.get('do_preprocessing', False):
            for flat in flat_imgs:
                smoothing.hot_spot_removal(flat)
        scores['spectral'] = isotope_pattern_match(flat_imgs, formula_ints)
        if scores['spectral'] > 0:
            scores['spatial'] = isotope_image_correlation(
                flat_imgs, weights=formula_ints[1:])
            if scores['spatial'] > 0:
                moc = measure_of_chaos(
                    dense_imgs[0], img_gen_config.get('nlevels', 30))
                # A chaos of exactly 1.0 means "no structure": zero it out.
                scores['chaos'] = 0 if np.isclose(moc, 1.0) else moc
                if scores['chaos'] > 0:
                    scores['msm'] = (scores['chaos'] * scores['spatial']
                                     * scores['spectral'])
        scores['total_iso_ints'] = [d.sum() for d in dense_imgs]
        scores['min_iso_ints'] = [d.min() for d in dense_imgs]
        scores['max_iso_ints'] = [d.max() for d in dense_imgs]
    return OrderedDict((key, replace_nan(val)) for key, val in scores.items())
def plot_images(ion_datacube,iso_spect,iso_max,q_val=99,c_map='hot'):
    """Render the first `iso_max` isotope images plus a predicted-vs-data
    isotope pattern stem plot, and return the list of axes.

    NOTE(review): mutates its inputs — ion_datacube.xic is hot-spot-filtered
    in place, and iso_spect[1] is re-normalised in place.

    :param ion_datacube: datacube whose xic entries hold the isotope images;
        modified in place.
    :param iso_spect: presumably [mz_array, intensity_array] for the isotope
        pattern — TODO confirm against caller.
    :param iso_max: number of isotope images/peaks to plot (first 4 are shown
        as images; the axes grid only has 4 image slots).
    :param q_val: percentile for hot-spot removal (default 99).
    :param c_map: matplotlib colormap name for the images.
    :return: list of 5 axes (4 image panels + 1 spectrum panel).
    """
    import numpy as np
    import matplotlib.pyplot as plt
    from pyImagingMSpec.image_measures import measure_of_chaos, isotope_image_correlation, isotope_pattern_match
    from pyImagingMSpec import smoothing as im_smoothing
    for ii in range(0, iso_max):
        # hot-spot removal
        xic = ion_datacube.xic[ii]
        im_smoothing.hot_spot_removal(xic, q_val)  # updated in place
        im = ion_datacube.xic_to_image(ii)
        #im = im_smoothing.median(im, size=3)
        ion_datacube.xic[ii] = ion_datacube.image_to_xic(im)
    # Chaos is scored on the monoisotopic (first) image with 30 levels.
    measure_value_score = measure_of_chaos(
        ion_datacube.xic_to_image(0), 30)
    # 3. Score correlation with monoiso
    if len(iso_spect[1]) > 1:
        iso_correlation_score = isotope_image_correlation(
            ion_datacube.xic, weights=iso_spect[1][1:])
    else:  # only one isotope peak, so correlation doesn't make sense
        iso_correlation_score = 1
    iso_ratio_score = isotope_pattern_match(ion_datacube.xic,iso_spect[1])
    # MSM = chaos * spatial * spectral
    msm_score = measure_value_score*iso_correlation_score*iso_ratio_score
    # 2x4 grid: top row = 4 isotope images, bottom row = spectrum panel.
    ax = [
        plt.subplot2grid((2, 4), (0, 0)),
        plt.subplot2grid((2, 4), (0, 1)),
        plt.subplot2grid((2, 4), (0, 2)),
        plt.subplot2grid((2, 4), (0, 3)),
        plt.subplot2grid((2, 4), (1, 0), colspan=4, rowspan=1)
    ]
    for a in ax:
        a.cla()
    # plot images
    for ii in range(0,iso_max):
        im = ion_datacube.xic_to_image(ii)
        ax[ii].imshow(im,cmap=c_map,interpolation='nearest')
        ax[ii].set_title('m/z: {:3.4f}'.format(iso_spect[0][ii]))
        ax[ii].set_xticks([],[])
        ax[ii].set_yticks([],[])
    # plot spectrum
    # Data pattern: per-peak total intensity over pixels where the
    # monoisotopic image is non-zero, L2-normalised to compare shapes.
    notnull=ion_datacube.xic_to_image(0)>0
    data_spect = [np.sum(ion_datacube.xic_to_image(ii)[notnull]) for ii in range(0,iso_max)]
    data_spect = data_spect / np.linalg.norm(data_spect)
    iso_spect[1] = iso_spect[1]/np.linalg.norm(iso_spect[1])
    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],iso_spect[1][0:iso_max],'g')
    plt.title("moc: {:3.5f} spat: {:3.5f} spec: {:3.5f} msm: {:3.5f}".format(measure_value_score,iso_correlation_score,iso_ratio_score,msm_score))
    plt.setp(stemlines, linewidth=2, color='g')  # set stems colors
    plt.setp(markerline, 'markerfacecolor', 'g','markeredgecolor','g')  # make points
    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],data_spect,'r')
    plt.setp(stemlines, linewidth=2, color='r')  # set stems colors
    plt.setp(markerline, 'markerfacecolor', 'r','markeredgecolor','r')  # make points
    #plot proxy artist
    # Invisible line handles so the legend gets simple colored entries
    # instead of stem containers.
    proxies=[]
    h, = plt.plot(iso_spect[0][0],[0],'-g')
    proxies.append(h)
    h, = plt.plot(iso_spect[0][0],[0],'-r')
    proxies.append(h)
    ax[4].legend(proxies,('predicted pattern','data pattern'), numpoints=1)
    return ax
def plot_images(ion_datacube, iso_spect, iso_max, q_val=99, c_map='hot'):
    """Render the first `iso_max` isotope images plus a predicted-vs-data
    isotope pattern stem plot, and return the list of axes.

    NOTE(review): mutates its inputs — ion_datacube.xic is hot-spot-filtered
    in place, and iso_spect[1] is re-normalised in place.

    :param ion_datacube: datacube whose xic entries hold the isotope images;
        modified in place.
    :param iso_spect: presumably [mz_array, intensity_array] for the isotope
        pattern — TODO confirm against caller.
    :param iso_max: number of isotope images/peaks to plot (first 4 are shown
        as images; the axes grid only has 4 image slots).
    :param q_val: percentile for hot-spot removal (default 99).
    :param c_map: matplotlib colormap name for the images.
    :return: list of 5 axes (4 image panels + 1 spectrum panel).
    """
    import numpy as np
    import matplotlib.pyplot as plt
    from pyImagingMSpec.image_measures import measure_of_chaos, isotope_image_correlation, isotope_pattern_match
    from pyImagingMSpec import smoothing as im_smoothing
    for ii in range(0, iso_max):
        # hot-spot removal
        xic = ion_datacube.xic[ii]
        im_smoothing.hot_spot_removal(xic, q_val)  # updated in place
        im = ion_datacube.xic_to_image(ii)
        #im = im_smoothing.median(im, size=3)
        ion_datacube.xic[ii] = ion_datacube.image_to_xic(im)
    # Chaos is scored on the monoisotopic (first) image with 30 levels.
    measure_value_score = measure_of_chaos(ion_datacube.xic_to_image(0), 30)
    # 3. Score correlation with monoiso
    if len(iso_spect[1]) > 1:
        iso_correlation_score = isotope_image_correlation(
            ion_datacube.xic, weights=iso_spect[1][1:])
    else:  # only one isotope peak, so correlation doesn't make sense
        iso_correlation_score = 1
    iso_ratio_score = isotope_pattern_match(ion_datacube.xic, iso_spect[1])
    # MSM = chaos * spatial * spectral
    msm_score = measure_value_score * iso_correlation_score * iso_ratio_score
    # 2x4 grid: top row = 4 isotope images, bottom row = spectrum panel.
    ax = [
        plt.subplot2grid((2, 4), (0, 0)),
        plt.subplot2grid((2, 4), (0, 1)),
        plt.subplot2grid((2, 4), (0, 2)),
        plt.subplot2grid((2, 4), (0, 3)),
        plt.subplot2grid((2, 4), (1, 0), colspan=4, rowspan=1)
    ]
    for a in ax:
        a.cla()
    # plot images
    for ii in range(0, iso_max):
        im = ion_datacube.xic_to_image(ii)
        ax[ii].imshow(im, cmap=c_map, interpolation='nearest')
        ax[ii].set_title('m/z: {:3.4f}'.format(iso_spect[0][ii]))
        ax[ii].set_xticks([], [])
        ax[ii].set_yticks([], [])
    # plot spectrum
    # Data pattern: per-peak total intensity over pixels where the
    # monoisotopic image is non-zero, L2-normalised to compare shapes.
    notnull = ion_datacube.xic_to_image(0) > 0
    data_spect = [
        np.sum(ion_datacube.xic_to_image(ii)[notnull])
        for ii in range(0, iso_max)
    ]
    data_spect = data_spect / np.linalg.norm(data_spect)
    iso_spect[1] = iso_spect[1] / np.linalg.norm(iso_spect[1])
    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],
                                                 iso_spect[1][0:iso_max], 'g')
    plt.title("moc: {:3.5f} spat: {:3.5f} spec: {:3.5f} msm: {:3.5f}".format(
        measure_value_score, iso_correlation_score, iso_ratio_score,
        msm_score))
    plt.setp(stemlines, linewidth=2, color='g')  # set stems colors
    plt.setp(markerline, 'markerfacecolor', 'g', 'markeredgecolor',
             'g')  # make points
    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],
                                                 data_spect, 'r')
    plt.setp(stemlines, linewidth=2, color='r')  # set stems colors
    plt.setp(markerline, 'markerfacecolor', 'r', 'markeredgecolor',
             'r')  # make points
    #plot proxy artist
    # Invisible line handles so the legend gets simple colored entries
    # instead of stem containers.
    proxies = []
    h, = plt.plot(iso_spect[0][0], [0], '-g')
    proxies.append(h)
    h, = plt.plot(iso_spect[0][0], [0], '-r')
    proxies.append(h)
    ax[4].legend(proxies, ('predicted pattern', 'data pattern'), numpoints=1)
    return ax