def compute(iso_images_sparse, sf_ints):
        np.seterr(invalid='ignore')  # suppress invalid-value (0/0 -> NaN) warnings from the metrics

        diff = len(sf_ints) - len(iso_images_sparse)
        iso_imgs = [
            empty_matrix if img is None else img.toarray()
            for img in iso_images_sparse + [None] * diff
        ]
        iso_imgs_flat = [img.flat[:][sample_area_mask] for img in iso_imgs]

        if img_gen_conf['do_preprocessing']:
            for img in iso_imgs_flat:
                smoothing.hot_spot_removal(img)

        m = ImgMetrics(metrics)
        if len(iso_imgs) > 0:
            m.map['spectral'] = isotope_pattern_match(iso_imgs_flat, sf_ints)
            m.map['spatial'] = isotope_image_correlation(iso_imgs_flat,
                                                         weights=sf_ints[1:])
            moc = measure_of_chaos(iso_imgs[0], img_gen_conf['nlevels'])
            m.map['chaos'] = 0 if np.isclose(moc, 1.0) else moc

            m.map['total_iso_ints'] = [img.sum() for img in iso_imgs]
            m.map['min_iso_ints'] = [img.min() for img in iso_imgs]
            m.map['max_iso_ints'] = [img.max() for img in iso_imgs]
        return m.to_tuple()
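The compute() snippets in this and the following examples are inner functions that close over names defined elsewhere (empty_matrix, sample_area_mask, img_gen_conf, metrics). Below is a minimal sketch of how such an enclosing scope might be set up, inferred purely from how the names are used; the shapes, defaults and metric list are assumptions, not the original pipeline code.

import numpy as np

# Hypothetical enclosing-scope setup for the compute() closures (assumed values).
nrows, ncols = 64, 64                                    # assumed ion-image dimensions
empty_matrix = np.zeros((nrows, ncols))                  # dense stand-in for missing isotope images
sample_area_mask = np.ones(nrows * ncols, dtype=bool)    # flat boolean mask of in-sample pixels
img_gen_conf = {'do_preprocessing': False, 'nlevels': 30}
metrics = ['chaos', 'spatial', 'spectral',               # metric names accessed via m.map[...]
           'total_iso_ints', 'min_iso_ints', 'max_iso_ints']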
Example #2
    def compute(iso_images_sparse, sf_ints):
        diff = len(sf_ints) - len(iso_images_sparse)
        iso_imgs = [empty_matrix if img is None else img.toarray()
                    for img in iso_images_sparse + [None] * diff]
        iso_imgs_flat = [img.flat[:][sample_area_mask] for img in iso_imgs]

        if img_gen_conf['do_preprocessing']:
            for img in iso_imgs_flat:
                smoothing.hot_spot_removal(img)

        measures = ImgMeasures(*([0] * 33))  # 33 zero-initialised measures, one per attribute set below
        if len(iso_imgs) > 0:
            measures.pattern_match = isotope_pattern_match(iso_imgs_flat, sf_ints)
            measures.image_corr = isotope_image_correlation(iso_imgs_flat, weights=sf_ints[1:])
            moc = measure_of_chaos(iso_imgs[0], img_gen_conf['nlevels'])
            measures.chaos = 0 if np.isclose(moc, 1.0) else moc
            (measures.image_corr_01, measures.image_corr_02, measures.image_corr_03,
             measures.image_corr_12, measures.image_corr_13,
             measures.image_corr_23) = isotope_image_correlation_sd(iso_imgs_flat)
            measures.snr = snr_img(iso_imgs[0])
            measures.percent_0s = percent_zero(iso_imgs[0])
            (measures.peak_int_diff_0, measures.peak_int_diff_1, measures.peak_int_diff_2,
             measures.peak_int_diff_3) = spectra_int_diff(iso_imgs_flat, sf_ints)
            measures.quart_1, measures.quart_2, measures.quart_3 = quartile_pxl(iso_imgs[0])
            (measures.ratio_peak_01, measures.ratio_peak_02, measures.ratio_peak_03,
             measures.ratio_peak_12, measures.ratio_peak_13,
             measures.ratio_peak_23) = ratio_peaks(iso_imgs_flat)
            (measures.percentile_10, measures.percentile_20, measures.percentile_30,
             measures.percentile_40, measures.percentile_50, measures.percentile_60,
             measures.percentile_70, measures.percentile_80,
             measures.percentile_90) = decile_pxl(iso_imgs[0])

        return measures.to_tuple()
Example #3
    def compute(iso_images_sparse, sf_ints):
        diff = len(sf_ints) - len(iso_images_sparse)
        iso_imgs = [
            empty_matrix if img is None else img.toarray()
            for img in iso_images_sparse + [None] * diff
        ]
        iso_imgs_flat = [img.flat[:][sample_area_mask] for img in iso_imgs]

        if img_gen_conf['do_preprocessing']:
            for img in iso_imgs_flat:
                smoothing.hot_spot_removal(img)

        measures = ImgMeasures(0, 0, 0)
        if len(iso_imgs) > 0:
            measures.pattern_match = isotope_pattern_match(
                iso_imgs_flat, sf_ints)
            measures.image_corr = isotope_image_correlation(
                iso_imgs_flat, weights=sf_ints[1:])
            moc = measure_of_chaos(iso_imgs[0], img_gen_conf['nlevels'])
            measures.chaos = 0 if np.isclose(moc, 1.0) else moc
        return measures.to_tuple()
Example #4
    def compute(iso_images_sparse, sf_ints):
        np.seterr(invalid='ignore')  # suppress invalid-value (0/0 -> NaN) warnings from the metrics

        diff = len(sf_ints) - len(iso_images_sparse)
        iso_imgs = [empty_matrix if img is None else img.toarray()
                    for img in iso_images_sparse + [None] * diff]
        iso_imgs_flat = [img.flat[:][sample_area_mask] for img in iso_imgs]

        if img_gen_conf['do_preprocessing']:
            for img in iso_imgs_flat:
                smoothing.hot_spot_removal(img)

        m = ImgMetrics(metrics)
        if len(iso_imgs) > 0:
            m.map['spectral'] = isotope_pattern_match(iso_imgs_flat, sf_ints)
            m.map['spatial'] = isotope_image_correlation(iso_imgs_flat, weights=sf_ints[1:])
            moc = measure_of_chaos(iso_imgs[0], img_gen_conf['nlevels'])
            m.map['chaos'] = 0 if np.isclose(moc, 1.0) else moc

            m.map['total_iso_ints'] = [img.sum() for img in iso_imgs]
            m.map['min_iso_ints'] = [img.min() for img in iso_imgs]
            m.map['max_iso_ints'] = [img.max() for img in iso_imgs]
        return m.to_tuple()
Example #5
    def compute(iso_images_sparse, formula_ints):
        np.seterr(invalid='ignore')  # suppress invalid-value (0/0 -> NaN) warnings from the metrics

        m = METRICS.copy()
        if len(iso_images_sparse) > 0:
            iso_imgs = [
                img.toarray() if img is not None else empty_matrix
                for img in iso_images_sparse
            ]

            iso_imgs_flat = [
                img.flatten()[sample_area_mask_flat] for img in iso_imgs
            ]
            iso_imgs_flat = iso_imgs_flat[:len(formula_ints)]

            if img_gen_config.get('do_preprocessing', False):
                for img in iso_imgs_flat:
                    smoothing.hot_spot_removal(img)

            m['spectral'] = isotope_pattern_match(iso_imgs_flat, formula_ints)
            if m['spectral'] > 0:

                m['spatial'] = isotope_image_correlation(
                    iso_imgs_flat, weights=formula_ints[1:])
                if m['spatial'] > 0:

                    moc = measure_of_chaos(iso_imgs[0],
                                           img_gen_config.get('nlevels', 30))
                    m['chaos'] = 0 if np.isclose(moc, 1.0) else moc
                    if m['chaos'] > 0:

                        m['msm'] = m['chaos'] * m['spatial'] * m['spectral']
                        m['total_iso_ints'] = [img.sum() for img in iso_imgs]
                        m['min_iso_ints'] = [img.min() for img in iso_imgs]
                        m['max_iso_ints'] = [img.max() for img in iso_imgs]
        metrics = OrderedDict((k, replace_nan(v)) for k, v in m.items())
        return metrics
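Example #5 also references METRICS and replace_nan from its enclosing module (alongside img_gen_config and sample_area_mask_flat, analogous to the scope sketched earlier). A minimal sketch of what those two could look like, based only on how they are used above; the key order, defaults and implementation are assumptions, not the original code.

from collections import OrderedDict
import numpy as np

# Hypothetical template of default metric values (the real one lives in the module).
METRICS = OrderedDict([
    ('chaos', 0.0), ('spatial', 0.0), ('spectral', 0.0), ('msm', 0.0),
    ('total_iso_ints', []), ('min_iso_ints', []), ('max_iso_ints', []),
])

def replace_nan(value, default=0):
    """Replace a NaN/None scalar, or NaNs inside a list, with a default value."""
    def fix(x):
        return default if x is None or np.isnan(x) else float(x)
    if isinstance(value, (list, np.ndarray)):
        return [fix(x) for x in value]
    return fix(value)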
Example #6
def plot_images(ion_datacube,iso_spect,iso_max,q_val=99,c_map='hot'):
    import numpy as np
    import matplotlib.pyplot as plt
    from pyImagingMSpec.image_measures import measure_of_chaos, isotope_image_correlation, isotope_pattern_match
    from pyImagingMSpec import smoothing as im_smoothing
    for ii in range(0, iso_max):
        # hot-spot removal
        xic = ion_datacube.xic[ii]
        im_smoothing.hot_spot_removal(xic, q_val)  # updated in place
        im = ion_datacube.xic_to_image(ii)
        #im = im_smoothing.median(im, size=3)
        ion_datacube.xic[ii] = ion_datacube.image_to_xic(im)
    measure_value_score = measure_of_chaos(
            ion_datacube.xic_to_image(0), 30)
    # 3. Score correlation with monoiso
    if len(iso_spect[1]) > 1:
        iso_correlation_score = isotope_image_correlation(
            ion_datacube.xic, weights=iso_spect[1][1:])
    else:  # only one isotope peak, so correlation doesn't make sense
        iso_correlation_score = 1
    iso_ratio_score = isotope_pattern_match(ion_datacube.xic,iso_spect[1])
    msm_score = measure_value_score*iso_correlation_score*iso_ratio_score

    ax = [
        plt.subplot2grid((2, 4), (0, 0)),
        plt.subplot2grid((2, 4), (0, 1)),
        plt.subplot2grid((2, 4), (0, 2)),
        plt.subplot2grid((2, 4), (0, 3)),
        plt.subplot2grid((2, 4), (1, 0), colspan=4, rowspan=1),
    ]
    for a in ax:
        a.cla()
    # plot images
    for ii in range(0,iso_max):
        im = ion_datacube.xic_to_image(ii)
        ax[ii].imshow(im,cmap=c_map,interpolation='nearest')
        ax[ii].set_title('m/z: {:3.4f}'.format(iso_spect[0][ii]))
        ax[ii].set_xticks([],[])
        ax[ii].set_yticks([],[])
    # plot spectrum
    notnull=ion_datacube.xic_to_image(0)>0
    data_spect = [np.sum(ion_datacube.xic_to_image(ii)[notnull]) for ii in range(0,iso_max)]
    data_spect = data_spect / np.linalg.norm(data_spect)
    iso_spect[1] = iso_spect[1]/np.linalg.norm(iso_spect[1])

    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],iso_spect[1][0:iso_max],'g')
    plt.title("moc: {:3.5f} spat: {:3.5f} spec: {:3.5f} msm: {:3.5f}".format(measure_value_score,iso_correlation_score,iso_ratio_score,msm_score))
    plt.setp(stemlines, linewidth=2, color='g')     # set stems  colors
    plt.setp(markerline, 'markerfacecolor', 'g','markeredgecolor','g')    # make points 

    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],data_spect,'r')
    plt.setp(stemlines, linewidth=2, color='r')     # set stems colors
    plt.setp(markerline, 'markerfacecolor', 'r','markeredgecolor','r')    # make points 

    #plot proxy artist
    proxies=[]
    h, = plt.plot(iso_spect[0][0],[0],'-g')
    proxies.append(h)
    h, = plt.plot(iso_spect[0][0],[0],'-r')
    proxies.append(h)
    ax[4].legend(proxies,('predicted pattern','data pattern'), numpoints=1)
    return ax
Example #7
def run_search(config, IMS_dataset, sum_formulae, adducts, mz_list):
    import time
    from pyImagingMSpec import image_measures
    ### Runs the main pipeline
    # Get sum formula and predicted m/z peaks for molecules in database
    ppm = config['image_generation']['ppm']  # parts per million - a measure of how accurate the mass spectrometer is
    nlevels = config['image_generation']['nlevels']  # parameter for measure of chaos
    do_preprocessing = config['image_generation']['do_preprocessing']
    interp = config['image_generation']['smooth']
    measure_value_score = {}
    iso_correlation_score = {}
    iso_ratio_score = {}
    t0 = time.time()
    t_el = 0
    for adduct in adducts:
        print('searching -> {}'.format(adduct))
        for ii,sum_formula in enumerate(sum_formulae):
            if sum_formula not in mz_list:
                print('missing sf: {}'.format(sum_formula))
                continue
            if adduct not in mz_list[sum_formula]:
                # adduct may not be present if it would make an impossible formula, is there a better way to handle this?
                # this hack is also used for fdr calculations
                # print '{} adduct not found for {}'.format(adduct, sum_formula)
                continue
            if time.time() - t_el > 10.:
                t_el = time.time()
                print('{:3.2f} done in {:3.0f} seconds'.format(float(ii) / len(sum_formulae), time.time() - t0))
            # Allocate dicts if required
            if sum_formula not in measure_value_score:
                measure_value_score[sum_formula] = {}
            if sum_formula not in iso_correlation_score:
                iso_correlation_score[sum_formula] = {}
            if sum_formula not in iso_ratio_score:
                iso_ratio_score[sum_formula] = {}
            try:
                # 1. Generate ion images
                mzs = mz_list[sum_formula][adduct][0] #+ 5*mz_list[sum_formula][adduct][0]*1e-6
                ion_datacube = IMS_dataset.get_ion_image(mzs, ppm)  # for each spectrum, sum the intensity of all peaks within tol of mz_list
                if do_preprocessing:
                    apply_image_processing(config, ion_datacube)  # currently just supports hot-spot removal
                # 2. Spatial Chaos
                measure_value_score[sum_formula][adduct] = image_measures.measure_of_chaos(
                    ion_datacube.xic_to_image(0), nlevels)
                if measure_value_score[sum_formula][adduct] == 1:
                    measure_value_score[sum_formula][adduct] = 0
                # 3. Score correlation with monoiso
                if len(mz_list[sum_formula][adduct][1]) > 1:
                    iso_correlation_score[sum_formula][adduct] = image_measures.isotope_image_correlation(
                        ion_datacube.xic, weights=mz_list[sum_formula][adduct][1][1:])
                else:  # only one isotope peak, so correlation doesn't make sense
                    iso_correlation_score[sum_formula][adduct] = 1
                # 4. Score isotope ratio
                iso_ratio_score[sum_formula][adduct] = image_measures.isotope_pattern_match(
                    ion_datacube.xic, mz_list[sum_formula][adduct][1])
            except KeyError as e:
                print(str(e))
                print('bad key in: "{}" "{}"'.format(sum_formula, adduct))
        output_results(config, measure_value_score, iso_correlation_score, iso_ratio_score, sum_formulae, [adduct], mz_list)
    return measure_value_score, iso_correlation_score, iso_ratio_score
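run_search() only reads a few keys from the image_generation section of config. A minimal sketch of that part of the configuration follows, with illustrative placeholder values; the real configuration likely contains more keys.

# Hypothetical configuration fragment; only these keys are read by run_search().
config = {
    'image_generation': {
        'ppm': 3.0,                 # m/z tolerance used by get_ion_image()
        'nlevels': 30,              # levels parameter for measure_of_chaos()
        'do_preprocessing': False,  # whether to run apply_image_processing()
        'smooth': 'median',         # smoothing method name read into `interp`
    },
}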
Example #8
def plot_images(ion_datacube, iso_spect, iso_max, q_val=99, c_map='hot'):
    import numpy as np
    import matplotlib.pyplot as plt
    from pyImagingMSpec.image_measures import measure_of_chaos, isotope_image_correlation, isotope_pattern_match
    from pyImagingMSpec import smoothing as im_smoothing
    for ii in range(0, iso_max):
        # hot-spot removal
        xic = ion_datacube.xic[ii]
        im_smoothing.hot_spot_removal(xic, q_val)  # updated in place
        im = ion_datacube.xic_to_image(ii)
        #im = im_smoothing.median(im, size=3)
        ion_datacube.xic[ii] = ion_datacube.image_to_xic(im)
    measure_value_score = measure_of_chaos(ion_datacube.xic_to_image(0), 30)
    # 3. Score correlation with monoiso
    if len(iso_spect[1]) > 1:
        iso_correlation_score = isotope_image_correlation(
            ion_datacube.xic, weights=iso_spect[1][1:])
    else:  # only one isotope peak, so correlation doesn't make sense
        iso_correlation_score = 1
    iso_ratio_score = isotope_pattern_match(ion_datacube.xic, iso_spect[1])
    msm_score = measure_value_score * iso_correlation_score * iso_ratio_score

    ax = [
        plt.subplot2grid((2, 4), (0, 0)),
        plt.subplot2grid((2, 4), (0, 1)),
        plt.subplot2grid((2, 4), (0, 2)),
        plt.subplot2grid((2, 4), (0, 3)),
        plt.subplot2grid((2, 4), (1, 0), colspan=4, rowspan=1)
    ]
    for a in ax:
        a.cla()
    # plot images
    for ii in range(0, iso_max):
        im = ion_datacube.xic_to_image(ii)
        ax[ii].imshow(im, cmap=c_map, interpolation='nearest')
        ax[ii].set_title('m/z: {:3.4f}'.format(iso_spect[0][ii]))
        ax[ii].set_xticks([], [])
        ax[ii].set_yticks([], [])
    # plot spectrum
    notnull = ion_datacube.xic_to_image(0) > 0
    data_spect = [
        np.sum(ion_datacube.xic_to_image(ii)[notnull])
        for ii in range(0, iso_max)
    ]
    data_spect = data_spect / np.linalg.norm(data_spect)
    iso_spect[1] = iso_spect[1] / np.linalg.norm(iso_spect[1])

    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],
                                                 iso_spect[1][0:iso_max], 'g')
    plt.title("moc: {:3.5f} spat: {:3.5f} spec: {:3.5f} msm: {:3.5f}".format(
        measure_value_score, iso_correlation_score, iso_ratio_score,
        msm_score))
    plt.setp(stemlines, linewidth=2, color='g')  # set stems  colors
    plt.setp(markerline, 'markerfacecolor', 'g', 'markeredgecolor',
             'g')  # make points

    markerline, stemlines, baseline = ax[4].stem(iso_spect[0][0:iso_max],
                                                 data_spect, 'r')
    plt.setp(stemlines, linewidth=2, color='r')  # set stems colors
    plt.setp(markerline, 'markerfacecolor', 'r', 'markeredgecolor',
             'r')  # make points

    #plot proxy artist
    proxies = []
    h, = plt.plot(iso_spect[0][0], [0], '-g')
    proxies.append(h)
    h, = plt.plot(iso_spect[0][0], [0], '-r')
    proxies.append(h)
    ax[4].legend(proxies, ('predicted pattern', 'data pattern'), numpoints=1)
    return ax
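plot_images() expects iso_spect to hold the predicted isotope pattern: index 0 the m/z values and index 1 the relative intensities, as used in the function body. A small illustrative value is shown below; the numbers are made up.

import numpy as np

# Hypothetical isotope pattern for a single formula/adduct.
iso_spect = [
    np.array([500.1234, 501.1267, 502.1300, 503.1334]),  # m/z of the isotope peaks
    np.array([1.00, 0.45, 0.12, 0.02]),                  # predicted relative intensities
]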
Example #9
def spectral_metric(iso_imgs_flat, formula_ints):
    # Ignore div-by-zero / NaN errors - they're handled internally
    with np.errstate(divide='ignore', invalid='ignore'):
        return np.nan_to_num(isotope_pattern_match(iso_imgs_flat,
                                                   formula_ints))
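A small usage sketch for spectral_metric() with synthetic data; it assumes numpy and pyImagingMSpec are installed, that spectral_metric is defined as above, and that isotope_pattern_match compares summed image intensities against the theoretical pattern. The image size and pattern values are made up.

import numpy as np

# Three flattened isotope images whose summed intensities roughly follow the
# theoretical pattern 1.0 : 0.5 : 0.1, so the returned score should be high.
rng = np.random.default_rng(0)
iso_imgs_flat = [rng.random(400), 0.5 * rng.random(400), 0.1 * rng.random(400)]
formula_ints = [1.0, 0.5, 0.1]

print(spectral_metric(iso_imgs_flat, formula_ints))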