Example #1
0
    def getDimMax(self, dataDim):
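        # Maximum value for this data dimension: the point count for a sampled
        # dimension, otherwise point 1.0 converted to ppm via the primary
        # data dim ref (the maximum of the shift axis).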

        if dataDim.className == 'SampledDataDim':
            r = float(dataDim.numPoints)
        else:
            converter = UnitConverter.pnt2ppm
            dataDimRef = ExperimentBasic.getPrimaryDataDimRef(dataDim)
            r = converter(1.0, dataDimRef)

        return r
Example #2
0
  def getDimMax(self, spectrum, dim):
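    # As in the previous example, but the data dimension is first looked up on
    # the spectrum by its dim number.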

    dataDim = spectrum.findFirstDataDim(dim=dim)
    if dataDim.className == 'SampledDataDim':
      r = float(dataDim.numPoints)
    else:
      converter = UnitConverter.pnt2ppm
      dataDimRef = ExperimentBasic.getPrimaryDataDimRef(dataDim)
      r = converter(1.0, dataDimRef)

    return r
Example #3
0
  def convertToPoints(self, spectrum, n, region):
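    # Convert a (min, max) region for dimension index n (0-based) into data
    # points. Non-sampled dimensions are converted from ppm, swapping the
    # bounds (ppm and point axes run in opposite directions); sampled
    # dimensions are taken to be in points already. The lower bound is shifted
    # to a 0-based index and both bounds are rounded to the nearest integer.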

    (rMin, rMax) = region
    dim = n + 1
    dataDim = spectrum.findFirstDataDim(dim=dim)

    if dataDim.className != 'SampledDataDim':
      converter = UnitConverter.ppm2pnt
      dataDimRef = ExperimentBasic.getPrimaryDataDimRef(dataDim)
      (rMin, rMax) = (converter(rMax, dataDimRef), converter(rMin, dataDimRef))
      rMin = max(1, rMin)
      rMax = min(dataDim.numPoints, rMax)
    rMin = rMin - 1
    rMin = int(rMin+0.5)
    rMax = int(rMax+0.5)

    if rMin >= rMax:
      raise Exception('invalid region in dimension %d' % dim)

    return (rMin, rMax)
Example #4
0
    def convertToPoints(self, dataDims, region):
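        # Per-dimension variant: each (min, max) pair is converted from ppm for
        # non-sampled dimensions, clipped to the valid point range, shifted to
        # 0-based indices and swapped if the bounds come out reversed.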

        pointsRegion = []
        for i, dataDim in enumerate(dataDims):
            rMin, rMax = region[i]

            if dataDim.className != 'SampledDataDim':
                converter = UnitConverter.ppm2pnt
                dataDimRef = ExperimentBasic.getPrimaryDataDimRef(dataDim)
                rMin = converter(rMin, dataDimRef)
                rMax = converter(rMax, dataDimRef)
                rMin = max(1, rMin)
                rMax = min(dataDim.numPoints, rMax)

            rMin = rMin - 1
            rMax = rMax - 1

            if rMin > rMax:
                rMax, rMin = rMin, rMax

            pointsRegion.append((rMin, rMax))

        return pointsRegion
Example #5
0
    def updateSpectrumTable(self, *extra):
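        # Rebuild the per-dimension table for the current spectrum: one row per
        # frequency data dimension showing its number, isotope code and the
        # current peak-find minimum linewidth and box width.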

        spectrum = self.spectrum

        textMatrix = []
        dataDims = []
        if spectrum:
            dataDims = spectrum.sortedDataDims()

        objectList = []
        for dataDim in dataDims:
            if isinstance(dataDim, Nmr.FreqDataDim):
                objectList.append(dataDim)
                textMatrix.append([
                    dataDim.dim,
                    ExperimentBasic.getPrimaryDataDimRef(
                        dataDim).expDimRef.isotopeCodes[0],
                    PeakFindParams.getPeakFindMinLinewidth(dataDim),
                    PeakFindParams.getPeakFindBoxwidth(dataDim)
                ])

        self.spectrumMatrix.update(objectList=objectList,
                                   textMatrix=textMatrix)
Example #6
0
    def changeRegionPeakList(self, peakList):
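        # Switch the region-find peak list. If the new list belongs to a
        # spectrum whose dimension types and isotopes match the old one, the
        # existing region table is left untouched; otherwise it is rebuilt with
        # a single 'include' condition spanning the whole spectrum.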

        if peakList is self.regionFindPeakList:
            return

        if peakList and self.regionFindPeakList:
            spectrum1 = peakList.dataSource
            spectrum2 = self.regionFindPeakList.dataSource
            self.regionFindPeakList = peakList

            if spectrum1 is spectrum2:
                return

            if spectrum1.numDim == spectrum2.numDim:
                for n in range(spectrum1.numDim):
                    dim = n + 1
                    dataDim1 = spectrum1.findFirstDataDim(dim=dim)
                    dataDim2 = spectrum2.findFirstDataDim(dim=dim)
                    if dataDim1.className != dataDim2.className:
                        break

                    if (dataDim1.className == 'FreqDataDim'
                            and dataDim2.className == 'FreqDataDim'):
                        isotopes1 = ExperimentBasic.getPrimaryDataDimRef(dataDim1).expDimRef.isotopeCodes
                        isotopes2 = ExperimentBasic.getPrimaryDataDimRef(dataDim2).expDimRef.isotopeCodes
                        if isotopes1 != isotopes2:
                            break
                else:
                    return  # just use what is there already as sensible default
        else:
            self.regionFindPeakList = peakList

        if peakList:
            spectrum = peakList.dataSource
            ndim = spectrum.numDim
            dataDims = spectrum.sortedDataDims()
        else:
            spectrum = None
            ndim = 0
            dataDims = []

        tipTexts = [
            'Whether to include or exclude the stated region from region-wide peak picking',
        ]
        headingList = ['Condition']
        textRow = ['include']
        regionMin = []
        regionMax = []
        editWidgets = [self.conditionMenu] + 2 * ndim * [self.regionEntry]
        editGetCallbacks = [self.getCondition]
        editSetCallbacks = [self.setCondition]
        for dataDim in dataDims:
            dim = dataDim.dim
            headingList.extend(['Dim %d Min' % dim, 'Dim %d Max' % dim])
            tipTexts.append(
                'Lower value bound of peak picking inclusion/exclusion region for spectrum dimension %s'
                % dim)
            tipTexts.append(
                'Upper value bound of peak picking inclusion/exclusion region for spectrum dimension %s'
                % dim)
            (rMin, rMax) = self.getWholeRegion(dataDim)
            textRow.append(rMin)
            textRow.append(rMax)
            regionMin.append(rMin)
            regionMax.append(rMax)

            i = dim - 1
            editGetCallbacks.append(lambda row, i=i: self.getRegionMin(row, i))
            editGetCallbacks.append(lambda row, i=i: self.getRegionMax(row, i))
            editSetCallbacks.append(lambda row, i=i: self.setRegionMin(row, i))
            editSetCallbacks.append(lambda row, i=i: self.setRegionMax(row, i))

        condition = RegionCondition('include', regionMin, regionMax)
        objectList = [condition]
        textMatrix = [textRow]
        self.regionConditions = [condition]
        self.regionFindMatrix.update(objectList=objectList,
                                     textMatrix=textMatrix,
                                     headingList=headingList,
                                     tipTexts=tipTexts,
                                     editSetCallbacks=editSetCallbacks,
                                     editGetCallbacks=editGetCallbacks,
                                     editWidgets=editWidgets)
Example #7
0
from math import sqrt

def initialiseAmideExpts(argServer, hsqc=None, tocsy=None, noesy=None):
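    # Analysis macro: picks NOESY and TOCSY peaks if none exist, filters them
    # against a 15N HSQC noise threshold, clusters the amide (H, N) positions,
    # picks an HSQC peak at each cluster centre and attaches the NOESY/TOCSY
    # peaks to their nearest centre. Helpers such as pnt2ppm, ppm2pnt,
    # findPeaks and pickPeak are assumed to come from the CCPN analysis
    # library and to be imported elsewhere.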

    func = ExperimentBasic.getPrimaryDataDimRef

    xDim = 1

    peakSize = (0.09, 0.49)

    ratio = peakSize[0] / peakSize[1]
    tol = peakSize[0] / 5.0

    minPpmH = 6.0
    maxPpmH = 9.84

    waterRegion = [4.90, 4.92]

    #tocsy = argServer.getSpectrum()

    #nhsqcPl = getBlankPeakList(nhsqc)
    #tocsyPl = getBlankPeakList(tocsy)
    #noesyPl = getBlankPeakList(noesy)

    noesyPl = argServer.getPeakList()
    noesy = noesyPl.dataSource

    tocsyPl = argServer.getPeakList()
    tocsy = tocsyPl.dataSource

    dataDims = noesy.sortedDataDims()

    if not noesyPl.peaks:
        print "Picking new NOE peaks"
        wholeRegion = [[
            pnt2ppm(dd.numPointsOrig, func(dd)),
            pnt2ppm(0, func(dd))
        ] for dd in dataDims]
        excludeRegion = [[0, dd.numPointsOrig] for dd in dataDims]

        dataDimRef = func(dataDims[xDim])
        excludeRegion[xDim] = [
            ppm2pnt(waterRegion[0], dataDimRef),
            ppm2pnt(waterRegion[1], dataDimRef)
        ]

        findPeaks(
            noesyPl,
            wholeRegion,
            argServer.parent,
            [1, 1, 1],
        )

    if not tocsyPl.peaks:
        print "Picking new TOCSY peaks"
        wholeRegion = [[
            pnt2ppm(dd.numPointsOrig, func(dd)),
            pnt2ppm(0, func(dd))
        ] for dd in dataDims]
        excludeRegion = [[0, dd.numPointsOrig] for dd in dataDims]

        dataDimRef = func(dataDims[xDim])
        excludeRegion[xDim] = [
            ppm2pnt(waterRegion[0], dataDimRef),
            ppm2pnt(waterRegion[1], dataDimRef)
        ]

        findPeaks(
            tocsyPl,
            wholeRegion,
            argServer.parent,
            [1, 1, 1],
        )

    nhsqcPl = argServer.getPeakList()
    nhsqc = nhsqcPl.dataSource

    noise = ExperimentBasic.getNoiseEstimate(nhsqc) * 2.0

    dataDims = nhsqc.sortedDataDims()
    dd0 = dataDims[0]
    dd1 = dataDims[1]

    ddr0 = ExperimentBasic.getPrimaryDataDimRef(dd0)
    ddr1 = ExperimentBasic.getPrimaryDataDimRef(dd1)

    print "Initial NOESY filter"
    amides = []

    allPeaks = list(noesyPl.peaks)
    allPeaks.extend(tocsyPl.peaks)

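    # Filter: keep only peaks whose amide 1H shift lies within
    # [minPpmH, maxPpmH] and whose (H, N) position maps onto the HSQC grid
    # with intensity above the noise threshold at the four neighbouring grid
    # points; surviving NOESY peaks are collected as amide candidates.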
    for peak in allPeaks:
        peakDims = peak.sortedPeakDims()
        ppm0 = peakDims[0].value
        ppm2 = peakDims[2].value

        # ppm positions converted to (1-based) point coordinates, truncated to
        # ints for the block file lookups below
        pnt0 = int(ppm2pnt(ppm0, ddr0))
        pnt1 = int(ppm2pnt(ppm2, ddr1))

        if ppm0 < minPpmH:
            peak.delete()
            continue

        if ppm0 > maxPpmH:
            peak.delete()
            continue

        if (pnt0 - 1 < 0) or (pnt0 > dd0.numPointsOrig):
            peak.delete()
            continue

        if (pnt1 - 1 < 0) or (pnt1 > dd1.numPointsOrig):
            peak.delete()
            continue

        height1 = nhsqc.block_file.getValue((pnt0 - 1, pnt1))
        height2 = nhsqc.block_file.getValue((pnt0, pnt1 - 1))
        height3 = nhsqc.block_file.getValue((pnt0 - 1, pnt1 - 1))
        height4 = nhsqc.block_file.getValue((pnt0, pnt1))

        if height1 < noise:
            peak.delete()
            continue
        if height2 < noise:
            peak.delete()
            continue
        if height3 < noise:
            peak.delete()
            continue
        if height4 < noise:
            peak.delete()
            continue

        if peak.peakList is noesyPl:
            amides.append((peak, ppm0, ppm2))

        peak.ppmH = ppm0
        peak.ppmN = ppm2

    print "Cluster %d amides" % len(amides)
    cluster = {}
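    # Single-linkage clustering: two amide peaks are merged into one cluster
    # whenever their N-weighted (H, N) distance is within tol.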
    for i in range(len(amides) - 1):
        if i and i % 100 == 0:
            print i
        peak1, ppm0, ppm1 = amides[i]
        if cluster.get(peak1) is None:
            cluster[peak1] = [peak1]

        for j in range(i + 1, len(amides)):
            peak2, ppm2, ppm3 = amides[j]
            if peak1 is peak2:
                continue

            if cluster.get(peak2) is None:
                cluster[peak2] = [peak2]

            if cluster[peak1] == cluster[peak2]:
                continue

            deltaH = ppm2 - ppm0
            deltaN = ratio * (ppm3 - ppm1)
            delta = sqrt((deltaH * deltaH) + (deltaN * deltaN))

            if (delta <= tol):
                cluster[peak1].extend(cluster[peak2])

                for peak3 in cluster[peak2]:
                    cluster[peak3] = cluster[peak1]

    print "Remove isolated peaks"
    clusters2 = {}
    for peak in cluster.keys():
        c = cluster[peak]
        if len(c) < 2:
            # the peak has no neighbour within tol, so discard it
            peak.delete()
        else:
            clusters2[c[0]] = c

    clusters = clusters2.values()

    ss = {}
    centres = []
    print "Check for overlapped clusters"
    for peaks in clusters:

        p = peaks[0]
        print 'CLUSTER', p.ppmH, p.ppmN

        for n in range(1):  # only a single refinement pass is performed
            print 'Iteration', n
            f = 0.30
            tolF = tol * f

            cluster = {}
            cluster2 = {}
            M = 0
            for i in range(len(peaks) - 1):

                peak1 = peaks[i]
                if cluster.get(peak1) is None:
                    cluster[peak1] = [peak1]
                    cluster2[peak1] = M
                    M += 1

                for j in range(i + 1, len(peaks)):
                    peak2 = peaks[j]
                    if peak1 is peak2:
                        continue

                    if cluster.get(peak2) is None:
                        cluster[peak2] = [peak2]
                        cluster2[peak2] = M
                        M += 1

                    if cluster2[peak1] == cluster2[peak2]:
                        continue

                    deltaH = peak1.ppmH - peak2.ppmH
                    deltaN = ratio * (peak1.ppmN - peak2.ppmN)
                    delta = sqrt((deltaH * deltaH) + (deltaN * deltaN))

                    if delta <= tolF:
                        cluster[peak1].extend(cluster[peak2])

                        for peak3 in cluster[peak2]:
                            cluster[peak3] = cluster[peak1]
                            cluster2[peak3] = cluster2[peak1]

            cluster3 = []
            for i in range(M):
                cluster3.append([])

            for peak in peaks:
                cluster3[cluster2[peak]].append(peak)

            print '  F %.3f' % (f),
            for i in range(M):
                N = float(len(cluster3[i]))
                if N > 1.0:
                    aveH = 0.0
                    aveN = 0.0
                    for peak in cluster3[i]:
                        aveH += peak.ppmH
                        aveN += peak.ppmN

                    aveH /= N
                    aveN /= N
                    hsqcPeak = pickPeak(nhsqcPl, (aveH, aveN), unit='ppm')

                    centres.append([hsqcPeak, aveH, aveN])
                    ss[hsqcPeak] = []
                    print len(cluster3[i]),

    print "Assign 15N HSQC"
    #assignAllNewResonances(peaks=nhsqcPl.peaks)
    #assignSpinSystemPerPeak(peaks=nhsqcPl.peaks)

    print "Assign NOESY & TOCSY"
    for peak in allPeaks:
        minDist = tol
        best = centres[0][0]
        for hsqcPeak, aveH, aveN in centres:
            deltaH = peak.ppmH - aveH
            deltaN = ratio * (peak.ppmN - aveN)
            delta = sqrt((deltaH * deltaH) + (deltaN * deltaN))

            if delta < minDist:
                minDist = delta
                best = hsqcPeak

        ss[best].append(peak)

    for hsqcPeak in ss.keys():
        peaks = ss[hsqcPeak]