import collections
from itertools import compress

from shapely.geometry import Point

from pluq import fileio, inbase
from pluq.base import Correlation
# Assignment is assumed to be a simple container (e.g. a namedtuple with
# fields for aa, atoms, score and ss_scores) defined elsewhere in the package.


def get_resonance_choices(resonance, correlations, experiment_name, level=95):
    """
    Determine which chemical shift ranges, at the given confidence level
    for an experiment, contain the input resonance. Each matching
    correlation is added to a dictionary and the resonance is scored
    against the probability density functions. The correlations are
    grouped by their amino acid in the dictionary.

    :param resonance: float or list of float chemical shifts
    :param correlations: list of pluq.base.Correlation
    :param experiment_name: one of the keys from inbase.standard_experiments
    :param level: int, one of the defined levels, normally in [68, 85, 95]
    :return: dict[aa] = list(Assignment, ...)
    """
    pdf_dict = inbase.read_pdf(experiment_name)

    levels = list(pdf_dict.attrs['confidence_levels'])
    try:
        ind = levels.index(level)
    except ValueError:
        mesg = 'Choose a confidence level from {}'.format(levels)
        raise ValueError(mesg)

    # Find all the hits
    exp = inbase.standard_experiments[experiment_name]
    if exp.dims == 1:
        # 1D experiment: compare the shift against each correlation's range.
        new_correlations = []
        for corr in correlations:
            try:
                cs_range = pdf_dict[str(corr) + ',levs'][ind]
            except KeyError:
                continue
            if min(cs_range) <= resonance <= max(cs_range):
                new_correlations.append(corr)
        correlations = new_correlations
    else:
        # Multi-dimensional experiment: test the point against each region.
        region_dict = fileio.read_region(experiment_name, level)
        regions = [region_dict[str(x)] for x in correlations]
        hits = map(Point(resonance).within, regions)
        correlations = list(compress(correlations, hits))

    # Score all the hits
    assignments = collections.defaultdict(list)
    for corr in correlations:
        try:
            smooth = inbase.get_pdf(corr, pdf_dict)
            corr_score = float(smooth.score(resonance))
        except (ValueError, KeyError):
            corr_score = 0

        # Also score against the secondary-structure specific PDFs.
        ss_scores = []
        for ss in ['H', 'C', 'E']:
            try:
                ss_corr = Correlation(corr.aa, corr.atoms, ss)
                corr_ss_smooth = inbase.get_pdf(ss_corr, pdf_dict)
                ss_scores.append(float(corr_ss_smooth.score(resonance)))
            except ValueError:
                ss_scores.append(0)
            except KeyError:
                ss_scores = None
                break

        assign = Assignment(corr.aa, corr.atoms, corr_score, ss_scores)
        assignments[corr.aa].append(assign)

    return assignments
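# Usage sketch (illustrative, not part of the module): the experiment key
# 'c', the 56.4 ppm shift and the candidate correlations below are made-up
# assumptions, and the pluq PDF/region data files must be available for the
# lookups and scoring to succeed.
candidates = [Correlation('A', ('CA',), ss='X'),
              Correlation('G', ('CA',), ss='X'),
              Correlation('S', ('CB',), ss='X')]
choices = get_resonance_choices(56.4, candidates, 'c', level=95)
for aa, aa_assignments in sorted(choices.items()):
    for assignment in aa_assignments:
        print(aa, assignment)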
if __name__ == "__main__": corr = Correlation('A', ('CA', 'CB'), 'H') pacsy = DBMySQL(db='pacsy_local', password='') pacsy_corr = PacsyCorrelation(corr, pacsy) data = pacsy_corr.get_cs(piqc=True, model='all', sigma_n=3, like_ss=True) smooth = estimate_pdf(data, params={'bandwidth': np.linspace(0.4, 1.5, 15)}) pdf_dict = read_pdf('cc') other = get_pdf(corr, pdf_dict) plt.figure(1) x, y = smooth.grid z = smooth.pdf plt.contour(x, y, z, sorted(list(smooth.get_levels(data, 68, 98, 95)))) plt.figure(2) x, y = other.grid z = other.pdf l = other.levels plt.contour(x, y, z, l)
import matplotlib.pyplot as plt
from descartes import PolygonPatch

from pluq.base import Correlation
from pluq.fileio import read_pdf
from pluq.inbase import get_pdf, _region


# Load the stored 2D PDF for the Ala CB-CA correlation.
corr = Correlation('A', ('CB', 'CA'), ss='X')
pdf_dict = read_pdf('cc')
pdf = get_pdf(corr, pdf_dict)

fig = plt.figure()
ax = fig.add_subplot(111)

# Show the PDF as an image over its chemical-shift limits.
plt.imshow(pdf.pdf, interpolation="none", aspect='auto', origin='lower',
           extent=(pdf.limits[0][0], pdf.limits[0][1],
                   pdf.limits[1][0], pdf.limits[1][1]))

# Overlay the confidence-level region as translucent polygon patches.
shapes = _region(pdf, pdf.levels[1])
for shape in shapes:
    patch = PolygonPatch(shape, fc='gray', ec='gray', alpha=0.5, zorder=1)
    ax.add_patch(patch)

plt.xlim([0, 100])
plt.ylim([0, 100])
plt.gca().invert_xaxis()
plt.show()