def saveProject(self):
    """Save the wrapped CCPN project under self.identifier and package it.

    No-op unless self.saveCcpn is set.  Saving (which also renames the
    project) has to happen before packaging can work.
    """
    if not self.saveCcpn:
        return
    # Must be saved (and thus also renamed) before it can be packaged
    targetPath = os.path.join(self.dataDir, self.identifier)
    genIo.saveProject(self.ccpnProject, newPath=targetPath,
                      newProjectName=self.identifier,
                      checkValid=True, removeExisting=True)
    genIo.packageProject(self.ccpnProject, targetPath)
def save(self):
    """Save the project to the directory selected in the dialog.

    Reads the project name and target directory from the GUI widgets,
    asks for confirmation before overwriting an existing project
    directory, then delegates the actual save to Io.saveProject.
    Sets self.did_save and fires self.callback on success.
    """
    projName = self.proj_name_entry.get()
    directory = self.proj_dir_select.getDirectory()
    # Full project directory = chosen save location + project name
    directory = joinPath(directory, projName)
    if self.isProjectDirectory(directory):
        # Target already looks like a project directory: confirm overwrite
        if not showOkCancel('Overwrite directory',
                            'Overwrite existing project directory?',
                            parent=self):
            return
    self.updateInfo()
    self.did_save = False
    # Whether to copy binary data files into the new project directory
    changeDataLocations = self.dataCheckButton.isSelected()
    done = False
    try:
        done = Io.saveProject(self.project, newPath=directory,
                              newProjectName=projName, createFallback=True,
                              showYesNo=showYesNo,
                              changeDataLocations=changeDataLocations,
                              showWarning=showWarning)
        if done:
            showInfo('Project saved', 'Project saved successfully')
            self.did_save = True
            if self.callback:
                self.callback(self.project)
        elif self.modal:
            # Save did not complete; in modal mode keep the dialog open
            return  # give another chance
    except Implementation.ApiError, e:
        # API-level validation failure: report, leave dialog state unchanged
        showError('Save project', e.error_msg)
def loadProject(parent, path, projectName=None): path = uniIo.normalisePath(path) askdir = lambda title, prompt, initial_value: askDir( title, prompt, initial_value, parent=parent, extra_dismiss_text='Skip') askfile = lambda title, prompt, initial_value: askFile( title, prompt, initial_value, parent=parent, extra_dismiss_text='Skip') project = genIo.loadProject(path, showWarning=showWarning, askDir=askdir, askFile=askfile) # now check dataStores # delete those that are not used # and otherwise check path to see if exists dataStores = [] for dataLocationStore in project.dataLocationStores: for dataStore in dataLocationStore.dataStores: if isinstance(dataStore, NumericMatrix) and not dataStore.nmrDataSources: print 'deleting dataStore %s with path %s' % ( dataStore, dataStore.fullPath) dataStore.delete() elif isinstance( dataStore, MimeTypeDataStore) and not dataStore.nmrDataSourceImages: print 'deleting dataStore %s with path %s' % ( dataStore, dataStore.fullPath) dataStore.delete() else: dataStores.append(dataStore) badDataStores = [ dataStore for dataStore in dataStores if not os.path.exists(dataStore.fullPath) ] if badDataStores: popup = DataLocationPopup(parent, project, modal=True) popup.destroy() return project
from memops.general import Io as genIo
from ccpnmr.integrator.core import Util as intUtil

if __name__ == '__main__':
    # Make NmrCalc.Run from json file.
    #
    # Usage: projectToJson projectDir jsonFilePath
    # Loads the CCPN project, creates an NmrCalc run from the json input,
    # saves the project, and writes the run id back into the json file.

    # get input arguments
    if len(sys.argv) == 3:
        projectDir, jsonFile = sys.argv[1:3]
        nmrProjectName = None  # kept for interface stability; not used here
    else:
        raise Exception("Usage: projectToJson projectDir jsonFilePath")

    try:
        memopsRoot = genIo.loadProject(projectDir, suppressGeneralDataDir=True)

        # Read the json input; close the handle explicitly (was leaked).
        fp = open(jsonFile)
        try:
            jsonObject = json.load(fp)
        finally:
            fp.close()

        # Keep a pretty-printed backup copy of the input before modifying it.
        tmpFile = jsonFile + '_tmp'
        fp = open(tmpFile, 'w')
        try:
            json.dump(jsonObject, fp, sort_keys=True, indent=2)
        finally:
            fp.close()

        nmrCalcId = intUtil.makeNmrCalc(memopsRoot, jsonObject)
        memopsRoot.saveModified()

        # Write the new run id back into the original json file.
        jsonObject['CCPN.nmrCalcId'] = nmrCalcId
        fp = open(jsonFile, 'w')
        try:
            json.dump(jsonObject, fp, sort_keys=True, indent=2)
        finally:
            fp.close()
    except:
        # Top-level boundary: log the traceback, then re-raise so the
        # process exits with an error status.
        traceback.print_exc()
        raise
def makeCcpnProject(entryName): """ Execute conversion to CCPN project """ logFileHandle = None try: #entryName = casdUtil.getEntryName(info) orgName = entryName.split('_')[0] + '_Org' print 'Starting', entryName logFileHandle = casdUtil.createLogFile(entryName, 'extractEntry') # get CCPN project from Org data #orgName = casdUtil.getEntryName(info, isOriginal=True) path = os.path.join(allDataDir, orgName[1:3], orgName) ppath = os.path.join(path, orgName) ff = ppath + '.tgz' if not (os.path.exists(ppath) or os.path.exists(ff)): raise Exception("NO original CCPN project in %s" % path) if not os.path.isdir(ppath): casdUtil.extractCompressedFile(ff, path, entryName) if not os.path.exists(ppath): raise Exception("NO extracted CCPN project in %s" % path) ccpnProject = genIo.loadProject(ppath, suppressGeneralDataDir=True) # neutralize any other pending CASD-NMR projects casdRun = casdUtil.prepareNmrCalcRun(ccpnProject, 'CING') for run in casdRun.nmrCalcStore.findAllRuns(status='pending'): if run is not casdRun: run.status = 'provisional' # dataDir = os.path.join(allDataDir, entryName[1:3], entryName) tmpdir = tempfile.mkdtemp(dir=casdConstants.topTmpDir) try: # Extract structure data tmpstruc = os.path.join(tmpdir, 'structures') src = casdUtil.getInputFile(entryName, 'structures') casdUtil.extractCompressedFile(src, tmpstruc, entryName, okExts=('pdb', )) tmpstruc = casdUtil.getLowestSubDir(tmpstruc, followDirs=('cns_format', )) structureFiles = os.listdir(tmpstruc) # Extract restraint data tmprestr = os.path.join(tmpdir, 'restraints') src = casdUtil.getInputFile(entryName, 'restraints', ignoreErrors=True) if src: casdUtil.extractCompressedFile(src, tmprestr, entryName) tmprestr = casdUtil.getLowestSubDir( tmprestr, followDirs=('cns_format', )) restraintFiles = os.listdir(tmprestr) else: restraintFiles = () print 'WARNING, %s no restraints found at %s' % (entryName, src) # read in data # FormatConverter version fcw = FormatConverterWrapper(ccpnProject=ccpnProject) # dataIo 
version #fcw = None if structureFiles: # NBNB uses Rasmus in-development trunk structure reading. # WOrks well. Temporarily disabled # read in structures pdbFiles = [ x for x in structureFiles if any( x.endswith(y) for y in casdConstants.pdbEndings) ] floatFiles = [ x for x in structureFiles if x.endswith('.float') ] if floatFiles: # Use only pdb files with names that match float files stems = set(x[:-6] for x in floatFiles) pdbFiles = [x for x in pdbFiles if x[:-4] in stems] pdbPaths = [os.path.join(tmpstruc, x) for x in pdbFiles] if True: #if fcw is None: #Always use dataIo version # dataIo version ensemble = StructureIo.getStructureFromFiles( ccpnProject.findFirstMolSystem(), pdbPaths) if ensemble is None: print '### Skipping %s, no structures loaded' % entryName else: print '### num files, ensemble', len( pdbPaths), ensemble.ensembleId casdRun.newStructureEnsembleData( name=entryName, structureEnsemble=ensemble) else: # FormatConverter version #fileInfo = fcw.determineFileInfo(pdbPaths[0]) if len(pdbPaths) != 1: print 'WARNING %s pdb files, only one read. TBD FIX' % len( pdbPaths) dataType = 'coordinates' formatName = 'pseudoPdb' pdbPath = pdbPaths[0] print 'Reading structure file', dataType, formatName, pdbPath fcw.readFile(dataType, formatName, pdbPath) # NBNB TODO 1) How to set up trying true PDB before pseudoPdb? # 2) How to read several files into an ensemble? # 3) How to get hold of the new ensemble for putting in NmrCalc else: print '### Skipping %s, no structure file' % entryName # Make NmrCalc object for shift list # NBNB consider later: if we are reading in assigned peaks, # shifts may change. NBNB shiftLists = casdRun.nmrCalcStore.nmrProject.findAllMeasurementLists( className='ShiftList') if len(shiftLists) == 1: casdRun.newMeasurementListData( name='Shiftlist', measurementList=shiftLists.pop()) else: print 'WARNING. 
%s shift lists found, should be s' % len( shiftLists) # Restraints reading if restraintFiles: if fcw is None: # NBNB TBD dataIo restraint reading to go here #for rfile in restraintFiles: # fileInfo = casdUtil.getFileInfo(tmprestr, rfile) pass else: # FormatConverter version restraintLists = [] for rfile in restraintFiles: rpath = os.path.join(tmprestr, rfile) fileInfo = fcw.determineFileInfo(rpath) dataType = fileInfo.get('dataType') formatName = fileInfo.get('formatName') if dataType is None or formatName is None: print 'Skipping unidentified restraint file', dataType, formatName, rfile elif dataType not in ( 'distanceConstraints', 'dihedralConstraints', 'rdcConstraints', ): print 'Skipping wrong type of restraint file', dataType, formatName, rfile else: print 'Reading restraint file', dataType, formatName, rfile fcw.readFile(dataType, formatName, rpath) if fcw.conversionSuccess: print("Successful restraint file read:\n%s" % fcw.conversionInfo) restraintLists.append(fcw.ccpnObjectOrList) else: print("Failed restraint file read:\n%s" % fcw.conversionInfo) if restraintLists: print("Found restraint lists: %s" % len(restraintLists)) casdRun.newConstraintStoreData( constraintLists=restraintLists, name='Restraintlists') # linkResonances print '### linking resonances' linkingInfo = fcw.linkAllResonancesToAtoms() finally: shutil.rmtree(tmpdir, ignore_errors=True) pass # rename and package project ccpnOutputDir = os.path.join(dataDir, entryName) genIo.saveProject(ccpnProject, newPath=ccpnOutputDir, newProjectName=entryName, checkValid=True, removeExisting=True) genIo.packageProject(ccpnProject, ccpnOutputDir) shutil.rmtree(ccpnOutputDir) ccpnOutputPath = ccpnOutputDir + '.tgz' print 'SUCCESS, %s saved to %s' % (entryName, ccpnOutputPath) return ccpnOutputPath except: print 'ERROR for %s' % (entryName) traceback.print_exc(file=sys.stdout) finally: if logFileHandle is not None: logFileHandle.close() sys.stdout = sys.__stdout__
def __init__(self, guiParent, project, callback=None, help_msg='',
             help_url='', dismiss_text='', modal=False, *args, **kw):
    """Build the 'save project' frame inside *guiParent*.

    project       - the CCPN project to be saved
    callback      - called with the project after a successful save
    help_msg/help_url/dismiss_text - passed to the button list
    modal         - if true, a failed save keeps the dialog open
    """
    self.project = project
    self.callback = callback
    self.help_msg = help_msg
    self.help_url = help_url
    self.dismiss_text = dismiss_text
    self.modal = modal
    # set by save() once a save has completed successfully
    self.did_save = False
    Frame.__init__(self, guiParent, *args, **kw)
    # Current project directory; its basename pre-fills the name entry
    projDir = Io.getUserDataPath(self.project)
    guiParent.grid_columnconfigure(1, weight=1)
    row = 0
    # Row 0: project name entry
    label = Label(guiParent, text='Project Name:')
    label.grid(row=row, column=0, sticky=Tkinter.E)
    self.proj_name_entry = Entry(
        guiParent,
        text=self.project.name,
        returnCallback=self.updateInfo,
        leaveCallback=self.updateInfo,
        tipText='The name used for the project save directory')
    self.proj_name_entry.grid(row=row, column=1, sticky=Tkinter.EW)
    row = row + 1
    # Row 1: read-only display of the resulting project directory
    label = Label(guiParent, text='Project Directory:')
    label.grid(row=row, column=0, sticky=Tkinter.E)
    label = self.proj_dir_label = Label(guiParent, text=projDir)
    label.grid(row=row, column=1, sticky=Tkinter.W)
    row = row + 1
    label = Label(
        guiParent,
        text='Note: Project Directory = Save Location + Project Name')
    label.grid(row=row, column=1, sticky=Tkinter.W)
    # Checkbox controlling whether binary data is copied on save
    text = 'Save binary data with project'
    tipText = 'Copy data files (e.g. for spectra) into new project directory if not already in current project directory: careful, this can take some time'
    row = row + 1
    self.dataCheckButton = CheckButton(guiParent, text=text, tipText=tipText)
    self.dataCheckButton.grid(row=row, column=0, columnspan=2,
                              sticky=Tkinter.W)
    row = row + 1
    # Expandable frame with the directory browser for the save location
    guiParent.grid_rowconfigure(row, weight=1)
    labelFrame = LabelFrame(guiParent, text='Save Location')
    labelFrame.grid(row=row, column=0, columnspan=2, sticky=Tkinter.NSEW)
    labelFrame.grid_rowconfigure(0, weight=1)
    labelFrame.grid_columnconfigure(0, weight=1)
    # Start browsing from the parent of the current project directory
    directory = os.path.dirname(projDir)
    self.proj_dir_select = FileSelect(
        labelFrame,
        directory=directory,
        select_dir_callback=self.selectDir,
        change_dir_callback=self.updateInfo,
        should_change_dir_callback=self.shouldChangeDir,
        getRowColor=self.getEntryColor,
        show_file=False)
    self.proj_dir_select.grid(row=0, column=0, sticky=Tkinter.NSEW)
    row = row + 1
    # Bottom row: Save / Dismiss / Help buttons
    texts = ['Save']
    tipTexts = ['Save project with specified name in specified directory']
    commands = [self.save]
    buttons = createDismissHelpButtonList(guiParent,
                                          texts=texts,
                                          tipTexts=tipTexts,
                                          commands=commands,
                                          help_msg=self.help_msg,
                                          help_url=self.help_url,
                                          dismiss_text=self.dismiss_text,
                                          expands=True)
    buttons.grid(row=row, column=0, columnspan=2, sticky=Tkinter.EW)
def getOldData(oldPath, oldTag, currentVersionStr, oldName=None):
    """Load a project saved under an older code version.

    Temporarily switches the implementation top directory to the old
    version's tree (selected by *oldTag*), loads the project there, and
    - if the old data-model version differs from *currentVersionStr* -
    force-loads all TopObjects so they can be converted.

    Returns (oldRoot, oldVersionStr, tags) where tags is the list of
    MemopsRoot downlink names that were force-loaded (empty if no
    conversion was needed).
    """
    import os
    from memops.universal import Io as uniIo
    from memops.metamodel.Util import topologicalSortSubgraph

    # find old implementation top directory and change temporarily to it.
    currentTopDir = uniIo.getTopDirectory()
    ss1, lastdir = os.path.split(currentTopDir)
    ss2, newTag = os.path.split(ss1)
    prevTopDir = os.path.join(ss2, oldTag, lastdir)
    uniIo.setTopDirectory(prevTopDir)

    # NB these things are put inside the function to avoid problems with
    # the uniIo.setTopDirectory approach
    def getCurrentVersionStr():
        """ Encapsulated to get import out of main. Hoping to fix import
        problem.
        """
        from memops.general.Constants import currentModelVersion
        return str(currentModelVersion)

    def downlinkTagsByImport(root, sorter=topologicalSortSubgraph):
        """ downlink tags from MemopsRoot object for leafPackages in
        import order. This is the post-v2.0 version
        """
        leafPackages = []
        # NB: packages is extended while being iterated - this is a
        # deliberate breadth-first traversal of the package tree.
        packages = [root.metaclass.container.topPackage()]
        for pp in packages:
            childPackages = pp.containedPackages
            if childPackages:
                packages.extend(childPackages)
            else:
                leafPackages.append(pp)
        # sort leafPackages by import (imported before importing)
        leafPackages = sorter(leafPackages, 'accessedPackages')
        tags = []
        for pp in leafPackages:
            cc = pp.topObjectClass
            if cc is not None:
                pr = cc.parentRole
                if pr is not None:
                    tags.append(pr.otherRole.name)
        #
        return tags

    # BUGFIX: tags must be bound even when no conversion is needed,
    # otherwise the final return raises NameError when
    # oldVersionStr == currentVersionStr.
    tags = []
    try:
        # get old version as string
        oldVersionStr = getCurrentVersionStr()
        from memops.general import Io as oldIo

        # post-version-2 behaviour - get repository dir and project name
        if oldName is None:
            head, tail = os.path.split(oldPath)
            oldName = os.path.splitext(tail)[0]
            oldPath = os.path.dirname(os.path.dirname(head))

        # get root Object and force load all contents
        oldRoot = oldIo.loadProject(oldPath, projectName=oldName)
        if oldVersionStr != currentVersionStr:
            # We do have to convert - force load all TopObjects
            tags = downlinkTagsByImport(oldRoot)
            for tag in tags:
                for topObj in getattr(oldRoot, tag):
                    if not topObj.isLoaded:
                        topObj.load()
    finally:
        # set path back to current
        uniIo.setTopDirectory(currentTopDir)
    return (oldRoot, oldVersionStr, tags)
# Map from legacy atom names to the names used in this project
atomNameMap = {
    'HN': 'H',
}

if __name__ == '__main__':
    """ Hack for rough creation, given that neither compatibility nor PDB
    load works.
    NOTE(review): this script appears to continue beyond the visible
    chunk - seqData is built but not yet consumed here.
    """
    # Hard-coded test inputs (files expected in the working directory)
    pdbFile = '6628.pdb'
    talosFile = '6628.tab'
    molName = 'myMol'
    chainCode = 'A'
    workingDir = os.getcwd()

    # Fresh CCPN project with an Nmr project, shift list and a dummy
    # Analysis project attached.
    project = genIo.newProject('IsdInputTest')
    nmrProject = project.newNmrProject(name=molName)
    shiftList = nmrProject.newShiftList(name=molName)
    project.newAnalysisProject(name='dummy', nmrProject=nmrProject)

    # read Talos project
    path = uniIo.joinPath(workingDir, talosFile)
    if os.path.isfile(path):
        # get sequence data: (one-letter code, seqCode, insert code) tuples
        # from the last sequence block in the Talos file
        seqReader = TalosSequenceFile(path)
        seqReader.read()
        seqData = [(x.code1Letter, x.seqCode, x.seqInsertCode)
                   for x in seqReader.sequences[-1].elements]
        del seqReader
def loadProject(parent, path, projectName=None):
    """Load a CCPN project from *path*, tidy its data stores and try to
    repair missing data-file locations.

    Unused NumericMatrix / MimeTypeDataStore objects are deleted.  For
    data stores whose files are missing, an alternative base directory is
    searched for; if files are still missing afterwards, the
    DataLocationPopup is shown.  Returns the loaded project.
    (projectName is currently unused.)
    """
    path = uniIo.normalisePath(path)
    # Interactive fall-backs for the loader; both offer a 'Skip' option
    askdir = lambda title, prompt, initial_value: askDir(
        title, prompt, initial_value, parent=parent,
        extra_dismiss_text='Skip')
    askfile = lambda title, prompt, initial_value: askFile(
        title, prompt, initial_value, parent=parent,
        extra_dismiss_text='Skip')
    project = genIo.loadProject(path, showWarning=showWarning,
                                askDir=askdir, askFile=askfile)
    # now check dataStores
    # delete those that are not used
    # and otherwise check path to see if exists
    dataStores = []
    for dataLocationStore in project.dataLocationStores:
        for dataStore in dataLocationStore.dataStores:
            if isinstance(dataStore,
                          NumericMatrix) and not dataStore.nmrDataSources:
                print 'deleting dataStore %s with path %s' % (
                    dataStore, dataStore.fullPath)
                dataStore.delete()
            elif isinstance(
                    dataStore,
                    MimeTypeDataStore) and not dataStore.nmrDataSourceImages:
                print 'deleting dataStore %s with path %s' % (
                    dataStore, dataStore.fullPath)
                dataStore.delete()
            else:
                dataStores.append(dataStore)
    badDataStores = [
        dataStore for dataStore in dataStores
        if not os.path.exists(dataStore.fullPath)
    ]
    if badDataStores:
        # find DataUrls involved
        dataUrls = set(dataStore.dataUrl for dataStore in badDataStores)
        startDir = project.packageLocator.findFirstRepository(
        ).url.dataLocation
        for dataUrl in dataUrls:
            if not dataUrl.dataStores.difference(badDataStores):
                # all DataStores for this DataUrl are bad
                # we can make changes without affecting 'good' DataStores
                # Look for an obvious place the data may have moved to
                # NOTE(review): this rebinding shadows the accumulated
                # dataStores list built above; the final existence check
                # below therefore only sees the last DataUrl's stores -
                # verify whether that is intended.
                dataStores = dataUrl.sortedDataStores()
                fullPaths = [dataStore.fullPath for dataStore in dataStores]
                baseDir, newPaths = uniIo.suggestFileLocations(
                    fullPaths, startDir=startDir)
                if baseDir is not None:
                    # We have a file location that fits all missing files.
                    # Change dataStores to use it
                    print 'WARNING, resetting data locations to: \n%s\n' % baseDir
                    ccpGenIo.changeDataStoreUrl(dataStores[0], baseDir)
                    for ii, dataStore in enumerate(dataStores):
                        dataStore.path = newPaths[ii]
        # If any files are still missing after the repair attempt, let the
        # user fix the locations interactively (modal popup).
        if [
                dataStore for dataStore in dataStores
                if not os.path.exists(dataStore.fullPath)
        ]:
            popup = DataLocationPopup(parent, project, modal=True)
            popup.destroy()
    return project