Example #1
def loadProjDefinitionFile(argServer, inputFileName=None, exptName=None):

    # parse input file
    if inputFileName is None:
        inputFileName = argServer.getFile()

    if inputFileName:
        fileData = genUtil.parseBrukerSummary(inputFileName)
    else:
        return

    # get format specifier
    format = fileData.get('FORMAT')
    if format not in supportedFormats:
        print("ERROR Unknown format; %s. Supported formats: %s" %
              (format, supportedFormats))

    # get OpenSpectrum popup and set up
    top = argServer.parent
    open_spectra = top.popups.get('open_spectrum')
    if open_spectra is None:
        top.openPopup('open_spectrum', OpenSpectrum.OpenSpectrumPopup)
        open_spectra = top.popups.get('open_spectrum')

    open_spectra.verifySelect.setSelected(False)
    open_spectra.sharedExpSelect.setSelected(True)

    # set up data structures
    extraData = {}
    specNames = []
    fileNames = []
    scalingFactors = []
    useScalingFactors = False

    if format == 'Bruker':

        keepDirectories = 4

        dimParNameMap = {
            'NUCLEI': 'displayNames',
            #'SW_ppm':'swppm',
            #'SW_hz':'sw',
            #'O1_ppm':'carppm',
        }
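        # (the map above translates per-dimension tags from the Bruker summary
        # file into extraData keys; only NUCLEI is currently passed through)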

        # set up special data
        numDims = None
        for tagin, tagout in dimParNameMap.items():
            val = fileData.get(tagin)
            if val is not None:
                if numDims is None:
                    numDims = len(val)
                elif numDims != len(val):
                    print("ERROR for %s: numDims inconsistent, should be %d" %
                          (tagin, numDims))
                    return
                extraData[tagout] = val

        # get file and base directory
        ll = []
        for tt in fileData.items():
            try:
                ind = int(tt[0])
                # this is a string form of an integer. Use it
                ll.append((ind, tt))
            except ValueError:
                # not a string form of an integer - skip
                continue

        for xx in sorted(ll):
            tt = xx[1]
            specName, dd = tt
            fileName = dd.get('PATH')
            if fileName:
                specNames.append(str(specName))
                fileNames.append(fileName)
                scalingFactor = dd.get('DEFINITION')
                if scalingFactor:
                    if numDims is None:
                        # always the case first time for now, but the code might change
                        numDims = len(scalingFactor)
                    if len(scalingFactor) == numDims:
                        useScalingFactors = True
                    else:
                        scalingFactor = None
                else:
                    scalingFactor = None
                scalingFactors.append(scalingFactor)

        #indx = list(OpenSpectrum.file_formats).index('Bruker')
        #open_spectra.formatPulldown.setSelectedIndex(indx)
        #open_spectra.chooseFormat(indx,'Bruker')
        open_spectra.formatPulldown.set('Bruker')
        open_spectra.chooseFormat('Bruker')
    else:
        return

    # find and set file selection directory
    #baseDir = uniIo.commonSuperDirectory(*fileNames)

    startDir = os.path.dirname(inputFileName)
    baseDir, paths = uniIo.suggestFileLocations(fileNames, startDir=startDir)
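    # suggestFileLocations returns a common base directory plus paths relative
    # to it; baseDir is None if no single location fits all the files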

    if baseDir is None:
        print('WARNING, described files not found. Aborting')
        return

    fileNames = [os.path.join(baseDir, path) for path in paths]

    defaultExpName = os.path.basename(baseDir)
    open_spectra.fileSelect.changeDir(baseDir)

    # set 'Spectra to open' matrix:
    objectList = []
    textMatrix = []

    # get experiment name
    if exptName is None:
        exptName = argServer.askString(message='New Experiment Name',
                                       default=defaultExpName)
        #exptName = defaultExpName

    shiftListName = open_spectra.getShiftLists()[0]
    windowOpt = 1
    for ii, specName in enumerate(specNames):
        path = paths[ii]
        optString = OpenSpectrum.WINDOW_OPTS[windowOpt]
        textMatrix.append([exptName, specName, path, optString, shiftListName])
        objectList.append([exptName, specName, path, windowOpt, shiftListName])

    if len(fileNames) > 1:
        open_spectra.openButton.config(text='Open Spectra')
    else:
        open_spectra.openButton.config(text='Open Spectrum')

    open_spectra.scrolledMatrix.update(objectList=objectList,
                                       textMatrix=textMatrix)

    if useScalingFactors:
        extraData['scalingFactors'] = scalingFactors[0]

    refSpec = open_spectra.openSpectrum(exptName,
                                        specNames[0],
                                        fileNames[0],
                                        extraData=extraData)
    if not refSpec:
        return

    refExp = refSpec.experiment

    # set up internal Analysis data
    open_spectra.parent.finishInitSpectrum(refSpec)
    print('finished opening spectrum', refExp.name, refSpec.name)

    refDataStore = refSpec.dataStore
    preferDataUrls = [refDataStore.dataUrl]
    memopsRoot = refSpec.root
    expDimRefs = [
        xdr for xd in refExp.sortedExpDims() for xdr in xd.sortedExpDimRefs()
    ]
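    # flat list of ExpDimRefs in dimension order, indexed below when setting
    # per-dimension scaling factors on the copied spectra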

    # open the remaining spectra as copies of the reference spectrum
    for ii, fullPath in enumerate(fileNames):
        if ii == 0:
            # skip first
            continue

        if useScalingFactors:
            scalingFactor = scalingFactors[ii]
        else:
            scalingFactor = None

        # get new DataLocationStore
        useDataUrl, filePath = getDataStoringFromFilepath(
            memopsRoot,
            fullPath,
            preferDataUrls=preferDataUrls,
            keepDirectories=keepDirectories)

        # get data file name from entry path
        if format == 'Bruker':
            from ccp.format.bruker.generalIO import getMatrixFilePath
            urlPath = os.path.join(useDataUrl.url.dataLocation, '')
            dataPath = getMatrixFilePath(os.path.join(urlPath, filePath),
                                         refDataStore.numDims)
            path = dataPath[len(urlPath):]
        elif format == 'Azara':
            raise NotImplementedError(
                "Azara type projection series not yet implemented")
        else:
            path = filePath
        copyPars = {'dataUrl': useDataUrl, 'path': path, 'nmrDataSources': []}
        newDataStore = copySubTree(refDataStore,
                                   refDataStore.dataLocationStore,
                                   topObjectParameters=copyPars)

        # get new spectrum
        copyPars = {'name': specNames[ii], 'dataStore': newDataStore}
        newSpectrum = copySubTree(refSpec,
                                  refExp,
                                  topObjectParameters=copyPars)

        if useScalingFactors and scalingFactor:
            dataDims = newSpectrum.sortedDataDims()
            lastDataDim = dataDims[-1]
            startAt = len(dataDims) - 1
            for jj in range(startAt, len(scalingFactor)):
                expDimRef = expDimRefs[jj]
                dimScaling = lastDataDim.findFirstDimensionScaling(
                    expDimRef=expDimRef)
                if dimScaling is None:
                    lastDataDim.newDimensionScaling(
                        scalingFactors=(scalingFactor[jj], ),
                        expDimRef=expDimRef)
                else:
                    dimScaling.scalingFactors = (scalingFactor[jj], )
        # set up internal Analysis data
        open_spectra.parent.visibleSpectra[newSpectrum] = False
        open_spectra.parent.finishInitSpectrum(newSpectrum)
        print('finished opening spectrum', refExp.name, newSpectrum.name)

    print()
    print('Projection spectra loaded into experiment', refExp.name)
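
# Usage sketch (assumption, not part of the original example): run as an
# Analysis macro, where the framework supplies `argServer`; the summary file
# path and experiment name below are hypothetical.
#
#     loadProjDefinitionFile(argServer,
#                            inputFileName='/data/projSpectra/summary.txt',
#                            exptName='HNCO_projections')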
Example #2
def loadProject(parent, path, projectName=None):

    path = uniIo.normalisePath(path)
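    # helper prompts used by the project loader to ask for missing directories
    # or files; the extra 'Skip' button lets the user dismiss a prompt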
    askdir = lambda title, prompt, initial_value: askDir(
        title, prompt, initial_value, parent=parent, extra_dismiss_text='Skip')
    askfile = lambda title, prompt, initial_value: askFile(
        title, prompt, initial_value, parent=parent, extra_dismiss_text='Skip')
    project = genIo.loadProject(path,
                                showWarning=showWarning,
                                askDir=askdir,
                                askFile=askfile)

    # now check dataStores
    # delete those that are not used
    # and otherwise check path to see if exists

    dataStores = []
    for dataLocationStore in project.dataLocationStores:
        for dataStore in dataLocationStore.dataStores:
            if (isinstance(dataStore, NumericMatrix)
                    and not dataStore.nmrDataSources):
                print('deleting dataStore %s with path %s' %
                      (dataStore, dataStore.fullPath))
                dataStore.delete()
            elif (isinstance(dataStore, MimeTypeDataStore)
                  and not dataStore.nmrDataSourceImages):
                print('deleting dataStore %s with path %s' %
                      (dataStore, dataStore.fullPath))
                dataStore.delete()
            else:
                dataStores.append(dataStore)

    badDataStores = [
        dataStore for dataStore in dataStores
        if not os.path.exists(dataStore.fullPath)
    ]

    if badDataStores:
        # find DataUrls involved
        dataUrls = set(dataStore.dataUrl for dataStore in badDataStores)
        startDir = project.packageLocator.findFirstRepository().url.dataLocation

        for dataUrl in dataUrls:
            if not dataUrl.dataStores.difference(badDataStores):
                # all DataStores for this DataUrl are bad
                # we can make changes without affecting 'good' DataStores

                # Look for an obvious place the data may have moved to
                # (use a separate name so the outer dataStores list is kept)
                urlDataStores = dataUrl.sortedDataStores()
                fullPaths = [dataStore.fullPath for dataStore in urlDataStores]
                baseDir, newPaths = uniIo.suggestFileLocations(
                    fullPaths, startDir=startDir)

                if baseDir is not None:
                    # We have a file location that fits all missing files.
                    # Change dataStores to use it
                    print('WARNING, resetting data locations to: \n%s\n' %
                          baseDir)

                    ccpGenIo.changeDataStoreUrl(urlDataStores[0], baseDir)
                    for ii, dataStore in enumerate(urlDataStores):
                        dataStore.path = newPaths[ii]

        # if any files are still missing, let the user fix locations manually
        if any(not os.path.exists(dataStore.fullPath)
               for dataStore in dataStores):
            popup = DataLocationPopup(parent, project, modal=True)
            popup.destroy()
            popup = DataLocationPopup(parent, project, modal=True)
            popup.destroy()

    return project
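
# Usage sketch (assumption, not part of the original example): `parent` is the
# top-level Analysis window/popup and the project path is hypothetical.
#
#     project = loadProject(topPopup, '/data/ccpn/myProject')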