def test_convertMovie(self):
    """Check movie conversion: read dimensions/type of an .mrc movie stack,
    convert a frame range to a new stack and verify the result.
    """
    movFn = self.dsFormat.getFile('qbeta/qbeta.mrc') + ":mrcs"
    ih = emlib.image.ImageHandler()
    # Check that we can read the dimensions of the input .mrc movie stack
    # (original comment said "dm4 file", which did not match the data):
    EXPECTED_SIZE = (4096, 4096, 1, 7)
    EXPECTED_DT = emlib.DT_USHORT

    self.assertEqual(ih.getDimensions(movFn), EXPECTED_SIZE)
    self.assertEqual(ih.getDataType(movFn), EXPECTED_DT)

    # Single-argument join() was a no-op; use the path directly
    outFn = '/tmp/qbeta_converted.mrcs'
    # Convert frames 2..6 (inclusive) -> a 5-frame output stack
    ih.convertStack(movFn, outFn, 2, 6)

    self.assertTrue(os.path.exists(outFn))
    self.assertTrue(pwutils.getFileSize(outFn) > 0)
    self.assertEqual(ih.getDimensions(outFn), (4096, 4096, 1, 5))
    self.assertEqual(ih.getDataType(outFn), EXPECTED_DT)

    if pwutils.envVarOn(SCIPION_DEBUG_NOCLEAN):
        print("Not cleaning output movie: ", outFn)
    else:
        pwutils.cleanPath(outFn)
def _drawNode(self, node): """ Allocate node with x=0 and y=0. """ try: parents = node.getParents() if not parents: print "EMPTY NODE ask JM" return maxParent = parents[0] for p in parents[1:]: if p.y > maxParent.y: maxParent = p siblings = maxParent.getChilds() if len(siblings) == 1: node.x = maxParent.x node.y = maxParent.y + self.DY else: rightSibling = siblings[0] for s in siblings: if s.x > rightSibling.x: rightSibling = s node.x = rightSibling.x + rightSibling.width / 2 + self.DX + node.width / 2 node.y = rightSibling.y except Exception as e: from pyworkflow.utils import envVarOn if envVarOn('SCIPION_DEBUG'): print "Can't draw node: %s" % node, e import traceback traceback.print_stack() else: # Do nothing return
def getNewFiles(self):
    """ Check if there are new files matching the pattern.
    Return an empty list if there is no files but some were found.
    Return None if no more files found anymore.
    """
    if self.finished:
        return None

    inputFiles = glob(self.filesPattern)
    newFiles = []
    someNew = False

    for f in inputFiles:
        if f not in self.foundFiles:
            someNew = True
            mTime = datetime.fromtimestamp(os.path.getmtime(f))
            delta = datetime.now() - mTime
            if pwutils.envVarOn('SCIPION_DEBUG'):
                # Python 3 print() (was a py2 print statement)
                print("Checking file: '%s' (%s) wait (%s)"
                      % (f, delta.seconds, self.fileWait.seconds))
            # Only accept files that have been stable for at least fileWait
            if delta > self.fileWait:
                newFiles.append(f)
                self.foundFiles[f] = True

    # Give up for good once nothing new has shown up within self.wait
    if not someNew and datetime.now() - self.startTime > self.wait:
        self.finished = True
        return None

    return newFiles
def getNewFiles(self):
    """ Check if there are new files matching the pattern.
    Return an empty list if there is no files but some were found.
    Return None if no more files found anymore.
    """
    if self.finished:
        return None

    inputFiles = glob(self.filesPattern)
    newFiles = []
    someNew = False

    for f in inputFiles:
        if f not in self.foundFiles:
            someNew = True
            mTime = datetime.fromtimestamp(os.path.getmtime(f))
            delta = datetime.now() - mTime
            if pwutils.envVarOn('SCIPION_DEBUG'):
                # Python 3 print() (was a py2 print statement)
                print("Checking file: '%s' (%s) wait (%s)"
                      % (f, delta.seconds, self.fileWait.seconds))
            # Only accept files that have been stable for at least fileWait
            if delta > self.fileWait:
                newFiles.append(f)
                self.foundFiles[f] = True

    # Give up for good once nothing new has shown up within self.wait
    if not someNew and datetime.now() - self.startTime > self.wait:
        self.finished = True
        return None

    return newFiles
def _cleanMovieFolder(self, movieFolder):
    """Remove the temporary movie folder, unless the no-clean debug
    flag is enabled (then the data is kept on disk for inspection).
    """
    keepData = pwutils.envVarOn(SCIPION_DEBUG_NOCLEAN)
    if keepData:
        self.info('Clean movie data DISABLED. '
                  'Movie folder will remain in disk!!!')
        return
    self.info("Erasing.....movieFolder: %s" % movieFolder)
    os.system('rm -rf %s' % movieFolder)
def _drawNode(self, node): """ Allocate node with x=0 and y=0. """ try: parents = node.getParents() if not parents: print "EMPTY NODE ask JM" return maxParent = parents[0] for p in parents[1:]: if p.y > maxParent.y: maxParent = p siblings = maxParent.getChilds() if len(siblings) == 1: node.x = maxParent.x node.y = maxParent.y + self.DY else: rightSibling = siblings[0] for s in siblings: if s.x > rightSibling.x: rightSibling = s node.x = rightSibling.x + rightSibling.width/2 + self.DX + node.width/2 node.y = rightSibling.y except Exception as e: from pyworkflow.utils import envVarOn if envVarOn('SCIPION_DEBUG'): print "Can't draw node: %s" % node, e import traceback traceback.print_stack() else: # Do nothing return
def _openExternal(self):
    """ Open a new window with an external viewer. """
    if not envVarOn('SCIPION_EXTERNAL_VIEWER'):
        showTextFileViewer("File viewer", self.fileList, self.windows)
        return
    # External viewer requested: open the currently selected entry, if any
    if self.taList:
        index = max(self.getIndex(), 0)
        openTextFileEditor(self.taList[index].filename)
def getEntryFromWebservice(self, uuid):
    """Query the notification web service for the project entry
    matching *uuid*. Returns None when SCIPION_NOTIFY is disabled.
    """
    if not pwutils.envVarOn('SCIPION_NOTIFY'):
        return
    # NOTE(review): assumes SCIPION_NOTIFY_URL is set whenever
    # SCIPION_NOTIFY is on; otherwise .get() returns None and
    # .strip() raises AttributeError -- confirm with deployment docs.
    urlName = os.environ.get('SCIPION_NOTIFY_URL').strip()
    # remove last directory (workflow-app path) to get the base URL
    urlName = os.path.split(urlName)[0]
    url = urlName + "/?project_uuid=" + uuid
    # Return the result instead of silently discarding it (the original
    # assigned resultDict and never used it)
    resultDict = self._sendData(url)
    return resultDict
def getEntryFromWebservice(self, uuid):
    """Query the notification web service for the project entry
    matching *uuid*. Returns None when SCIPION_NOTIFY is disabled.
    """
    if not pwutils.envVarOn('SCIPION_NOTIFY'):
        return
    # NOTE(review): assumes SCIPION_NOTIFY_URL is set whenever
    # SCIPION_NOTIFY is on; otherwise .get() returns None and
    # .strip() raises AttributeError -- confirm with deployment docs.
    urlName = os.environ.get('SCIPION_NOTIFY_URL').strip()
    # remove last directory (workflow-app path) to get the base URL
    urlName = os.path.split(urlName)[0]
    url = urlName + "/?project_uuid=" + uuid
    # Return the result instead of silently discarding it (the original
    # assigned resultDict and never used it)
    resultDict = self._sendData(url)
    return resultDict
def find_ispyb_path(self, input_file):
    """ Given a visit, find the path where png images should be stored. """
    if pwutils.envVarOn('SCIPIONBOX_ISPYB_ON'):
        p = realpath(join(self.project_path, input_file))
        # Walk up the tree until a directory ending with the visit name
        # is found. Guard against dirname('/') == '/', which made the
        # original loop spin forever when the visit was never found.
        while p and not p.endswith(self.visit):
            parent = dirname(p)
            if parent == p:  # reached the filesystem root
                p = ''
                break
            p = parent
        return join(p, '.ispyb')
    else:
        return self.protocol._getExtraPath()
def find_ispyb_path(self, input_file):
    """ Given a visit, find the path where png images should be stored. """
    if pwutils.envVarOn('SCIPIONBOX_ISPYB_ON'):
        p = realpath(join(self.project.path, input_file))
        # Walk up the tree until a directory ending with the visit name
        # is found. Guard against dirname('/') == '/', which made the
        # original loop spin forever when the visit was never found.
        while p and not p.endswith(self.visit):
            parent = dirname(p)
            if parent == p:  # reached the filesystem root
                p = ''
                break
            p = parent
        return join(p, '.ispyb')
    else:
        return self.protocol._getExtraPath()
def onExportTreeGraph(self):
    """Dump the project runs graph as a Graphviz .gv file and open it
    in a text editor; print a hint about toggling id/name labels.
    """
    runsGraph = self.project.getRunsGraph(refresh=True)
    useId = not pwutils.envVarOn('SCIPION_TREE_NAME')
    dotStr = runsGraph.printDot(useId=useId)

    # Text mode: printDot returns a str
    with tempfile.NamedTemporaryFile(suffix='.gv', mode="w") as dotFile:
        dotFile.write(dotStr)
        dotFile.flush()
        openTextFileEditor(dotFile.name)

    hint = ("\nexport SCIPION_TREE_NAME=1 # to use names instead of ids"
            if useId else
            "\nexport SCIPION_TREE_NAME=0 # to use ids instead of names")
    print(hint)
def onExportTreeGraph(self):
    """Dump the project runs graph as a Graphviz .gv file and open it
    in a text editor; print a hint about toggling id/name labels.
    """
    runsGraph = self.project.getRunsGraph(refresh=True)
    useId = not pwutils.envVarOn('SCIPION_TREE_NAME')
    dotStr = runsGraph.printDot(useId=useId)

    # mode="w" is required: the default 'w+b' is binary and writing the
    # str returned by printDot() would raise TypeError on Python 3
    with tempfile.NamedTemporaryFile(suffix='.gv', mode="w") as dotFile:
        dotFile.write(dotStr)
        dotFile.flush()
        openTextFileEditor(dotFile.name)

    # Python 3 print() (were py2 print statements)
    if useId:
        print("\nexport SCIPION_TREE_NAME=1 # to use names instead of ids")
    else:
        print("\nexport SCIPION_TREE_NAME=0 # to use ids instead of names")
def notifyWorkflow(self):
    """Send the project workflow (protocol names only) to the stats web
    service, at most once per SCIPION_NOTIFY_SECONDS and only when the
    workflow changed. Best-effort: any failure is reported, never raised.
    """
    try:
        # Check if environment switch exists, otherwise abort
        if not pwutils.envVarOn('SCIPION_NOTIFY'):
            return

        # Check the seconds range of the notify, by default one day
        seconds = int(os.environ.get('SCIPION_NOTIFY_SECONDS', '86400'))

        if self._modifiedBefore(seconds):
            # Notify not more than once per `seconds`
            return

        # INFO: now we are only sending the protocols names in the project.
        # We could pass namesOnly=False to get the full workflow template
        projectWorfklow = self.project.getProtocolsJson(namesOnly=True)

        # If the workflow has not been altered do not send it
        if not self._dataModified(projectWorfklow):
            return
        else:
            # For compatibility with version 1.0 check
            # if Log directory exists. If it does not, create it
            # TODO REMOVE this check in scipion 1.3
            dataFile = self._getDataFileName()
            # create the folder of the file path if not exists
            pwutils.makeFilePath(dataFile)
            with open(dataFile, 'w') as f:
                f.write(projectWorfklow)

        dataDict = {'project_uuid': self._getUuid(),
                    'project_workflow': projectWorfklow}
        urlName = os.environ.get('SCIPION_NOTIFY_URL',
                                 config.SCIPION_STATS_WORKFLOW_APP).strip()
        urlName += "addOrUpdateWorkflow/"
        t = threading.Thread(name="notifier",
                             target=lambda: self._sendData(urlName, dataDict))
        t.start()  # will execute function in a separate thread
    except Exception as e:
        # Python 3 print() (was a py2 print statement)
        print("Can't report usage: ", e)
def newProtocol(cls, protocolClass, **kwargs):
    """ Create new protocols instances through the project
    and return a newly created protocol of the given class
    """
    # When SCIPION_TEST_CONTINUE is set, try to reuse a protocol of the
    # same class from a previous execution instead of creating a new one
    if pwutils.envVarOn('SCIPION_TEST_CONTINUE'):
        previous = cls.proj.mapper.selectByClass(protocolClass.__name__)
        if previous:
            prot = previous[0]
            if prot.isFinished():
                prot._run = False
            else:
                prot.runMode.set(MODE_RESTART)
            return prot
    return cls.proj.newProtocol(protocolClass, **kwargs)
def _createConnection(self, dbName, timeout):
    """Establish db connection"""
    self._dbName = dbName

    # Reuse a cached connection for this db when allowed
    cached = self._reuseConnections and dbName in self.OPEN_CONNECTIONS
    if cached:
        self.connection = self.OPEN_CONNECTIONS[dbName]
    else:
        conn = sqlite.Connection(dbName, timeout, check_same_thread=False)
        conn.row_factory = sqlite.Row
        self.OPEN_CONNECTIONS[dbName] = conn
        self.connection = conn

    self.cursor = self.connection.cursor()
    # Shortcut functions; route through the debug wrapper when tracing SQL
    self.executeCommand = (self._debugExecute
                           if envVarOn('SCIPION_DEBUG_SQLITE')
                           else self.cursor.execute)
    self.commit = self.connection.commit
def getDataSet(cls, name):
    """ This method is called every time the dataset want to be retrieved.
    Synchronizes the dataset files first, unless SCIPION_TEST_NOSYNC is set.
    """
    assert name in cls._datasetDict, "Dataset: %s dataset doesn't exist." % name

    ds = cls._datasetDict[name]

    if not pwutils.envVarOn('SCIPION_TEST_NOSYNC'):
        urlArg = '' if ds.url is None else ' -u ' + ds.url
        command = ("%s %s --download %s %s"
                   % (pw.PYTHON, pw.getSyncDataScript(), ds.folder, urlArg))
        print(">>>> %s" % command)
        os.system(command)

    return cls._datasetDict[name]
def _createConnection(self, dbName, timeout):
    """Establish db connection"""
    self._dbName = dbName

    # Reuse a cached connection for this db when allowed
    cached = self._reuseConnections and dbName in self.OPEN_CONNECTIONS
    if cached:
        self.connection = self.OPEN_CONNECTIONS[dbName]
    else:
        conn = sqlite.Connection(dbName, timeout, check_same_thread=False)
        conn.row_factory = sqlite.Row
        self.OPEN_CONNECTIONS[dbName] = conn
        self.connection = conn

    self.cursor = self.connection.cursor()
    # Shortcut functions; route through the debug wrapper when tracing SQL
    self.executeCommand = (self._debugExecute
                           if envVarOn(SCIPION_DEBUG_SQLITE)
                           else self.cursor.execute)
    self.commit = self.connection.commit
def extractParticlesStep(self, micId, baseMicName, fnCTF,
                         micrographToExtract, micOps,
                         doInvert, normalizeArgs, doBorders):
    """ Extract particles from one micrograph """
    outputRoot = str(self._getExtraPath(baseMicName))
    fnPosFile = self._getExtraPath(baseMicName + ".pos")

    # If it has coordinates extract the particles
    particlesMd = 'particles@%s' % fnPosFile
    boxSize = self.boxSize.get()
    boxScale = self.getBoxScale()
    # Python 3 print() (was a py2 print statement)
    print("boxScale: ", boxScale)

    if exists(fnPosFile):
        # Apply first all operations required for the micrograph
        for program, args in micOps:
            self.runJob(program, args)

        args = " -i %s --pos %s" % (micrographToExtract, particlesMd)
        args += " -o %s --Xdim %d" % (outputRoot, boxSize)

        if doInvert:
            args += " --invert"
        if fnCTF:
            args += " --ctfparam " + fnCTF
        if doBorders:
            args += " --fillBorders"

        self.runJob("xmipp_micrograph_scissor", args)

        # Normalize
        if normalizeArgs:
            self.runJob('xmipp_transform_normalize',
                        '-i %s.stk %s' % (outputRoot, normalizeArgs))
    else:
        self.warning("The micrograph %s hasn't coordinate file! "
                     % baseMicName)
        self.warning("Maybe you picked over a subset of micrographs")

    # Let's clean the temporary mrc micrographs
    if not pwutils.envVarOn("SCIPION_DEBUG_NOCLEAN"):
        pwutils.cleanPattern(self._getTmpPath(baseMicName) + '*')
def _createISPyBProcess(self, db):
    """Launch the ISPyB helper script as a subprocess, loading the
    required environment modules first when ISPyB is enabled.
    """
    import pyworkflow.utils as pwutils

    if pwutils.envVarOn('SCIPIONBOX_ISPYB_ON'):
        cmd = ('source /etc/profile.d/modules.sh;'
               'module unload python/ana;'
               'module load python/ana;'
               'module unload ispyb-api/ana;'
               'module load ispyb-api/ana;')
    else:
        cmd = ''

    cmd += 'python %s %s' % (SCRIPT, db)
    # Python 3 print() (was a py2 print statement)
    print("** Running: '%s'" % cmd)
    self.proc = subprocess.Popen(cmd, shell=True,
                                 stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE)
def notifyWorkflow(self): try: #check if enviroment exists otherwise abort if not pwutils.envVarOn('SCIPION_NOTIFY'): return # Check the seconds range of the notify, by default one day seconds = int(os.environ.get('SCIPION_NOTIFY_SECONDS', '86400')) if self._modifiedBefore(seconds): # notify not more than once a day #print "sec, no notification", seconds return # INFO: now we are only sending the protocols names in the project. # We could pass namesOnly=False to get the full workflow template projectWorfklow = self.project.getProtocolsJson(namesOnly=True) #if list with workflow has not been altered do not sent it if not self._dataModified(projectWorfklow): #print "No change: Do not send new data" return else: # For compatibility with version 1.0 check # if Log directory exists. If it does not # create it #TODO REMOVE this check in scipion 1.3 dataFile = self._getDataFileName() # create the folder of the file path if not exists pwutils.makeFilePath(dataFile) with open(dataFile,'w') as f: f.write(projectWorfklow) #print "change send new data" dataDict = {'project_uuid': self._getUuid(), 'project_workflow': projectWorfklow} urlName = os.environ.get('SCIPION_NOTIFY_URL', config.SCIPION_STATS_WORKFLOW_APP).strip() urlName += "addOrUpdateWorkflow/" t = threading.Thread(target=lambda: self._sendData(urlName, dataDict)) t.start() # will execute function in a separate thread except Exception as e: print "Can't report usage: ", e
def load_vectors(cmm_file, vectors_str, distances_str, angpix):
    """ Load subparticle vectors either from Chimera CMM file or from
    a vectors string. Distances can also be specified for each vector
    in the distances_str. """
    if cmm_file:
        vectors = vectors_from_cmm(cmm_file, angpix)
    else:
        vectors = vectors_from_string(vectors_str)

    # NOTE(review): float(distances_str) assumes the string converts as a
    # single number, yet it is split on ',' below -- confirm callers never
    # pass a comma-separated list whose full string float() would reject.
    if float(distances_str) > 0.0:
        # Change distances from A to pixel units
        distances = [float(x) / angpix for x in distances_str.split(',')]
        if len(distances) != len(vectors):
            raise Exception("Error: The number of distances does not match "
                            "the number of vectors!")
        for vector, distance in zip(vectors, distances):
            if distance > 0:
                vector.set_length(distance)
            else:
                vector.compute_length()
    else:
        for vector in vectors:
            vector.compute_length()

    print("Using vectors:")

    for vector in vectors:
        vector.compute_unit_vector()
        vector.compute_matrix()
        # Print only when debugging
        if pwutils.envVarOn(SCIPION_DEBUG_NOCLEAN):
            vector.print_vector()

    return vectors
def openTextFile(filename):
    """ Open a text file with an external or default viewer. """
    useExternal = envVarOn('SCIPION_EXTERNAL_VIEWER')
    if useExternal:
        openTextFileEditor(filename)
        return
    showTextFileViewer("File viewer", [filename])
def evaluateSingleMicrograph(self, mic):
    """Annotate the estimated CTF metadata for *mic* and decide whether
    the estimation is acceptable.

    Returns False as soon as the PSD criterion rejects the micrograph;
    otherwise returns True/False according to the CTF-estimation (or
    phase-plate) criterion. Side effect: writes the evaluation metadata
    file and sets MDL_ENABLED = -1 on rejection.
    """
    micFn = mic.getFileName()
    micBase = self._getMicBase(mic)
    micDir = self._getMicrographDir(mic)

    # Shortcut to resolve per-micrograph file names
    def _getStr(key):
        return str(self._getFileName(key, micBase=micBase, root=micDir))

    fnCTF = _getStr('ctfParam')
    mdCTFparam = md.MetaData(fnCTF)
    objId = mdCTFparam.firstObject()
    # Fill in the paths of all related images in the CTF metadata row
    mdCTFparam.setValue(md.MDL_MICROGRAPH, micFn, objId)
    mdCTFparam.setValue(md.MDL_PSD, _getStr('psd'), objId)
    mdCTFparam.setValue(md.MDL_PSD_ENHANCED, _getStr('enhanced_psd'), objId)
    mdCTFparam.setValue(md.MDL_CTF_MODEL, _getStr('ctfParam'), objId)
    mdCTFparam.setValue(md.MDL_IMAGE1, _getStr('ctfmodel_quadrant'), objId)
    mdCTFparam.setValue(md.MDL_IMAGE2, _getStr('ctfmodel_halfplane'), objId)
    fnEval = _getStr('ctf')
    mdCTFparam.write(fnEval)

    # Evaluate if estimated ctf is good enough
    try:
        self.runJob("xmipp_ctf_sort_psds", "-i %s" % fnEval)
    except Exception:
        # Best-effort: sorting failures are deliberately ignored; the
        # criteria queries below still run on whatever was written
        pass

    fnRejected = _getStr('rejected')

    # Check if it is a good PSD
    criterion = self._criterion_psd
    self.runJob("xmipp_metadata_utilities",
                '-i %s --query select "%s" -o %s'
                % (fnEval, criterion, fnRejected))

    if not isMdEmpty(fnRejected):
        # PSD rejected: disable the entry and stop here
        mdCTFparam = md.MetaData(fnEval)
        mdCTFparam.setValue(md.MDL_ENABLED, -1, mdCTFparam.firstObject())
        mdCTFparam.write(fnEval)
        return False

    # Check if it is a good CTF estimation (phase-plate data uses its
    # own criterion)
    if self.findPhaseShift:
        criterion = self._criterion_phaseplate
    else:
        criterion = self._criterion_estimation

    self.runJob("xmipp_metadata_utilities",
                '-i %s --query select "%s" -o %s'
                % (fnEval, criterion, fnRejected))

    retval = True
    if not isMdEmpty(fnRejected):
        retval = False
        mdCTFparam = md.MetaData(fnEval)
        mdCTFparam.setValue(md.MDL_ENABLED, -1, mdCTFparam.firstObject())
        mdCTFparam.write(fnEval)

    # NOTE(review): the string below is a stray expression statement (a
    # no-op), not a docstring; kept byte-identical on purpose
    """This method indicates which criteria is rejecting the estimated CTF"""
    if pwutils.envVarOn('SCIPION_DEBUG'):
        self.checkRejectedCriteria(fnEval, fnRejected)

    return retval
ALL_TUTORIALS = OrderedDict([('intro', TutorialIntro), ('betagal', TutorialBetagal)]) if __name__ == '__main__': def printUsage(msg): if msg: print "ERROR: ", msg print "\nUSAGE: scipion tutorial [TUTORIAL_NAME]" print "\nwhere TUTORIAL_NAME can be:" print "\n".join([' %s' % k for k in ALL_TUTORIALS.keys()]) if pwutils.envVarOn('SCIPION_DEBUG'): # Add callback for remote debugging if available. try: from rpdb2 import start_embedded_debugger from signal import signal, SIGUSR2 signal(SIGUSR2, lambda sig, frame: start_embedded_debugger('a')) except ImportError: pass if len(sys.argv) == 2: manager = Manager() tutorialName = sys.argv[1] if not tutorialName in ALL_TUTORIALS: printUsage("Invalid tutorial '%s'." % tutorialName) else:
def processMovieStep(self, movieDict, hasAlignment):
    """Rebuild a Movie object from *movieDict*, stage its data in a
    working folder (decompressing/converting as needed), run the
    subclass `_processMovie` and finally clean up, marking the movie
    as done via an empty sentinel file.
    """
    movie = Movie()
    movie.setAcquisition(Acquisition())

    if hasAlignment:
        movie.setAlignment(MovieAlignment())

    movie.setAttributesFromDict(movieDict, setBasic=True,
                                ignoreMissing=True)

    movieFolder = self._getOutputMovieFolder(movie)
    movieFn = movie.getFileName()
    movieName = basename(movieFn)

    # Sentinel file marks an already-processed movie on continued runs
    if (self.isContinued() and os.path.exists(self._getMovieDone(movie))):
        self.info("Skipping movie: %s, seems to be done" % movieFn)
        return

    # Clean old finished files
    pwutils.cleanPath(self._getMovieDone(movie))

    if self._filterMovie(movie):
        pwutils.makePath(movieFolder)
        pwutils.createLink(movieFn, join(movieFolder, movieName))

        if movieName.endswith('bz2'):
            newMovieName = movieName.replace('.bz2', '')
            # We assume that if compressed the name ends with .mrc.bz2
            if not exists(newMovieName):
                self.runJob('bzip2', '-d -f %s' % movieName,
                            cwd=movieFolder)

        elif movieName.endswith('tbz'):
            newMovieName = movieName.replace('.tbz', '.mrc')
            # We assume that if compressed the name ends with .tbz
            if not exists(newMovieName):
                self.runJob('tar', 'jxf %s' % movieName, cwd=movieFolder)

        elif movieName.endswith('.tif'):
            # FIXME: It seems that we have some flip problem with compressed
            # tif files, we need to check that
            newMovieName = movieName.replace('.tif', '.mrc')
            # we assume that if compressed the name ends with .tbz
            if not exists(newMovieName):
                self.runJob('tif2mrc', '%s %s' % (movieName, newMovieName),
                            cwd=movieFolder)

        elif movieName.endswith('.txt'):
            # Support a list of frame as a simple .txt file containing
            # all the frames in a raw list, we could use a xmd as well,
            # but a plain text was choose to simply its generation
            movieTxt = os.path.join(movieFolder, movieName)
            with open(movieTxt) as f:
                movieOrigin = os.path.basename(os.readlink(movieFn))
                newMovieName = movieName.replace('.txt', '.mrcs')
                ih = ImageHandler()
                # Stack each listed frame into slice i+1 of the new .mrcs
                for i, line in enumerate(f):
                    if line.strip():
                        inputFrame = os.path.join(movieOrigin, line.strip())
                        ih.convert(inputFrame,
                                   (i + 1,
                                    os.path.join(movieFolder, newMovieName)))
        else:
            newMovieName = movieName

        # Convert to mrc(s) when required by the protocol configuration
        if (self.CONVERT_TO_MRC and not (newMovieName.endswith("mrc") or
                                         newMovieName.endswith("mrcs"))):
            inputMovieFn = os.path.join(movieFolder, newMovieName)
            if inputMovieFn.endswith('.em'):
                # ':ems' suffix selects the EM-stack reader
                inputMovieFn += ":ems"
            newMovieName = pwutils.replaceExt(newMovieName,
                                              self.CONVERT_TO_MRC)
            outputMovieFn = os.path.join(movieFolder, newMovieName)
            self.info("Converting movie '%s' -> '%s'"
                      % (inputMovieFn, outputMovieFn))
            ImageHandler().convertStack(inputMovieFn, outputMovieFn)

        # Just store the original name in case it is needed in _processMovie
        movie._originalFileName = pwobj.String(objDoStore=False)
        movie._originalFileName.set(movie.getFileName())
        # Now set the new filename (either linked or converted)
        movie.setFileName(os.path.join(movieFolder, newMovieName))

        self.info("Processing movie: %s" % movie.getFileName())
        self._processMovie(movie)

        if pwutils.envVarOn('SCIPION_DEBUG_NOCLEAN'):
            self.info('Clean movie data DISABLED. '
                      'Movie folder will remain in disk!!!')
        else:
            self.info("Erasing.....movieFolder: %s" % movieFolder)
            os.system('rm -rf %s' % movieFolder)
            # cleanPath(movieFolder)

        # Mark this movie as finished
        open(self._getMovieDone(movie), 'w').close()
def createOutputStep(self):
    """Assemble the output SetOfMovieParticles from the per-micrograph
    movie-particle stacks generated by Relion, grouping frames in blocks
    of `avgFrames` and re-indexing each particle per frame block.
    """
    inputMovies = self.getInputMovies()
    nFrames = inputMovies.getFirstItem().getNumberOfFrames()
    inputParts = self.getParticles()
    movieParticles = self._createSetOfMovieParticles()
    movieParticles.copyInfo(inputParts)
    movieParticles.setSamplingRate(self._getNewSampling())

    # State shared with the closure below: particles accumulated for the
    # micrograph currently being traversed
    self.lastMicName = None
    self.partList = []

    def _addPartsFromMic():
        # To avoid parsing the Relion star files...we are assuming here
        # the order in which Relion is generating the movie-particles per
        # stack: it start part 1, 2, N of frame 1, then 1, 2..N of frame 2
        # and so on. If this way changes in the future, the following code
        # could break. For the sake of performance, I will take the risk now.
        count = 0
        avgFrames = self.avgFrames.get()

        for frame in range(0, nFrames, avgFrames):
            frameId = min(frame + avgFrames, nFrames)

            for mPart in self.partList:
                mPart.setObjId(None)  # clear objId to insert a new one
                mPart.setFrameId(frameId)
                count += 1
                mPart.setIndex(count)
                mPart._rlnAverageNrOfFrames = em.Integer(avgFrames)
                movieParticles.append(mPart)

        del self.partList  # free unnecessary particle list memory
        self.partList = []

    # Particles are ordered by micrograph; flush the accumulated list each
    # time the micrograph changes
    for part in inputParts.iterItems(orderBy='_micId'):
        micName = part.getCoordinate().getMicName()

        if micName != self.lastMicName:
            if self.lastMicName is not None:
                _addPartsFromMic()
            self.lastMicName = micName
            movieBase = '%s_movie.mrcs' % pwutils.removeBaseExt(micName)

            def _replaceSuffix(suffix):
                return movieBase.replace('_movie.mrcs', suffix)

            # Move the resulting stack of movie-particles to extra directly
            movieStack = self._getExtraPath('output', 'extra', movieBase)
            self.newMovieStack = self._getExtraPath(
                _replaceSuffix('_ptcls.mrcs'))
            pwutils.moveFile(movieStack, self.newMovieStack)

            # Clean up intermediate files (either links or converted)
            # plus generated files not needed anymore
            if not pwutils.envVarOn("SCIPION_DEBUG_NOCLEAN"):
                pwutils.cleanPath(
                    self._getExtraPath(movieBase),
                    self._getExtraPath(_replaceSuffix('.mrcs')))

        # Create a movie particles based on that one and
        # store in the list of this movie
        mPart = em.MovieParticle()
        mPart.copy(part)  # copy all information from part
        mPart.setParticleId(part.getObjId())
        mPart.setFileName(self.newMovieStack)
        self.partList.append(mPart)

    pwutils.cleanPath(self._getExtraPath('output'))

    # Flush the particles of the last micrograph
    _addPartsFromMic()

    self._defineOutputs(outputParticles=movieParticles)
    self._defineSourceRelation(self.inputMovies, movieParticles)
    self._defineSourceRelation(self.inputParticles, movieParticles)
def _extractMicrograph(self, mic, doInvert, normalizeArgs, doBorders):
    """ Extract particles from one micrograph.

    Runs the xmipp preprocessing chain (noise filter, optional
    downsampling, dust removal and phase flip) before cutting out the
    particles with xmipp_micrograph_scissor, then optionally normalizes
    them. Temporary per-micrograph files are removed at the end unless
    SCIPION_DEBUG_NOCLEAN is set.
    """
    fnLast = mic.getFileName()
    baseMicName = pwutils.removeBaseExt(fnLast)
    outputRoot = str(self._getExtraPath(baseMicName))
    fnPosFile = self._getMicPos(mic)
    boxSize = self.boxSize.get()
    downFactor = self.downFactor.get()
    # Default patch size derived from the (downsampled) box size
    patchSize = self.patchSize.get() if self.patchSize.get() > 0 \
        else int(boxSize*1.5*downFactor)

    particlesMd = 'particles@%s' % fnPosFile

    # If it has coordinates extract the particles
    if exists(fnPosFile):
        # Create a list with micrographs operations (programs in xmipp) and
        # the required command line parameters (except input/ouput files)
        micOps = []

        # Compute the variance and Gini coeff. of the part. and mic., resp.
        args = '--pos %s' % fnPosFile
        args += ' --mic %s' % fnLast
        args += ' --patchSize %d' % patchSize
        self.runJob('xmipp_coordinates_noisy_zones_filter', args)

        def getMicTmp(suffix):
            return self._getTmpPath(baseMicName + suffix)

        # Check if it is required to downsample our micrographs
        if self.notOne(downFactor):
            fnDownsampled = getMicTmp("_downsampled.xmp")
            args = "-i %s -o %s --step %f --method fourier"
            self.runJob('xmipp_transform_downsample',
                        args % (fnLast, fnDownsampled, downFactor))
            fnLast = fnDownsampled

        if self.doRemoveDust:
            fnNoDust = getMicTmp("_noDust.xmp")
            args = " -i %s -o %s --bad_pixels outliers %f"
            self.runJob('xmipp_transform_filter',
                        args % (fnLast, fnNoDust, self.thresholdDust))
            fnLast = fnNoDust

        if self._useCTF():
            # We need to write a Xmipp ctfparam file
            # to perform the phase flip on the micrograph
            fnCTF = self._getTmpPath("%s.ctfParam" % baseMicName)
            micrographToCTFParam(mic, fnCTF)
            # Insert step to flip micrograph
            if self.doFlip:
                fnFlipped = getMicTmp('_flipped.xmp')
                args = " -i %s -o %s --ctf %s --sampling %f"
                self.runJob('xmipp_ctf_phase_flip',
                            args % (fnLast, fnFlipped, fnCTF,
                                    self._getNewSampling()))
                fnLast = fnFlipped
        else:
            fnCTF = None

        args = " -i %s --pos %s" % (fnLast, particlesMd)
        args += " -o %s --Xdim %d" % (outputRoot, boxSize)

        if doInvert:
            args += " --invert"
        if fnCTF:
            args += " --ctfparam " + fnCTF
        if doBorders:
            args += " --fillBorders"

        self.runJob("xmipp_micrograph_scissor", args)

        # Normalize
        if normalizeArgs:
            self.runJob('xmipp_transform_normalize',
                        '-i %s.stk %s' % (outputRoot, normalizeArgs))
    else:
        self.warning("The micrograph %s hasn't coordinate file! "
                     % baseMicName)
        self.warning("Maybe you picked over a subset of micrographs")

    # Let's clean the temporary mrc micrographs
    if not pwutils.envVarOn("SCIPION_DEBUG_NOCLEAN"):
        pwutils.cleanPattern(self._getTmpPath(baseMicName) + '*')
def processMovieStep(self, movieDict, hasAlignment):
    """Rebuild a Movie object from *movieDict*, stage its data in a
    working folder (decompressing, gain/dark-correcting or converting as
    needed), run the subclass `_processMovie` and clean up, marking the
    movie as done via an empty sentinel file.
    """
    movie = Movie()
    movie.setAcquisition(Acquisition())

    if hasAlignment:
        movie.setAlignment(MovieAlignment())

    movie.setAttributesFromDict(movieDict, setBasic=True,
                                ignoreMissing=True)

    movieFolder = self._getOutputMovieFolder(movie)
    movieFn = movie.getFileName()
    movieName = basename(movieFn)
    movieDoneFn = self._getMovieDone(movie)

    # Sentinel file marks an already-processed movie on continued runs
    if (self.isContinued() and os.path.exists(movieDoneFn)):
        self.info("Skipping movie: %s, seems to be done" % movieFn)
        return

    # Clean old finished files
    pwutils.cleanPath(movieDoneFn)

    if self._filterMovie(movie):
        pwutils.makePath(movieFolder)
        pwutils.createLink(movieFn, join(movieFolder, movieName))

        if movieName.endswith('bz2'):
            newMovieName = movieName.replace('.bz2', '')
            # We assume that if compressed the name ends with .mrc.bz2
            if not exists(newMovieName):
                self.runJob('bzip2', '-d -f %s' % movieName,
                            cwd=movieFolder)

        elif movieName.endswith('tbz'):
            newMovieName = movieName.replace('.tbz', '.mrc')
            # We assume that if compressed the name ends with .tbz
            if not exists(newMovieName):
                self.runJob('tar', 'jxf %s' % movieName, cwd=movieFolder)

        elif movieName.endswith('.txt'):
            # Support a list of frame as a simple .txt file containing
            # all the frames in a raw list, we could use a xmd as well,
            # but a plain text was choose to simply its generation
            movieTxt = os.path.join(movieFolder, movieName)
            with open(movieTxt) as f:
                movieOrigin = os.path.basename(os.readlink(movieFn))
                newMovieName = movieName.replace('.txt', '.mrcs')
                ih = ImageHandler()
                # Stack each listed frame into slice i+1 of the new .mrcs
                for i, line in enumerate(f):
                    if line.strip():
                        inputFrame = os.path.join(movieOrigin, line.strip())
                        ih.convert(inputFrame,
                                   (i+1, os.path.join(movieFolder,
                                                      newMovieName)))
        else:
            newMovieName = movieName

        convertExt = self._getConvertExtension(newMovieName)
        correctGain = self._doCorrectGain()

        if convertExt or correctGain:
            inputMovieFn = os.path.join(movieFolder, newMovieName)
            if inputMovieFn.endswith('.em'):
                # ':ems' suffix selects the EM-stack reader
                inputMovieFn += ":ems"

            if convertExt:
                newMovieName = pwutils.replaceExt(newMovieName, convertExt)
            else:
                # NOTE(review): os.path.splitext keeps the dot in the
                # extension, so this yields 'name_corrected..ext' -- kept
                # byte-identical; confirm against the real output names
                newMovieName = '%s_corrected.%s' % os.path.splitext(newMovieName)

            outputMovieFn = os.path.join(movieFolder, newMovieName)

            # If the protocols wants Scipion to apply the gain, then
            # there is no reason to convert, since we can produce the
            # output in the format expected by the program. In some cases,
            # the alignment programs can directly deal with gain and dark
            # correction images, so we don't need to apply it
            if self._doCorrectGain():
                self.info("Correcting gain and dark '%s' -> '%s'"
                          % (inputMovieFn, outputMovieFn))
                gain, dark = self.getGainAndDark()
                self.correctGain(inputMovieFn, outputMovieFn,
                                 gainFn=gain, darkFn=dark)
            else:
                self.info("Converting movie '%s' -> '%s'"
                          % (inputMovieFn, outputMovieFn))
                ImageHandler().convertStack(inputMovieFn, outputMovieFn)

        # Just store the original name in case it is needed in _processMovie
        movie._originalFileName = pwobj.String(objDoStore=False)
        movie._originalFileName.set(movie.getFileName())
        # Now set the new filename (either linked or converted)
        movie.setFileName(os.path.join(movieFolder, newMovieName))

        self.info("Processing movie: %s" % movie.getFileName())
        self._processMovie(movie)

        if pwutils.envVarOn('SCIPION_DEBUG_NOCLEAN'):
            self.info('Clean movie data DISABLED. '
                      'Movie folder will remain in disk!!!')
        else:
            self.info("Erasing.....movieFolder: %s" % movieFolder)
            os.system('rm -rf %s' % movieFolder)
            # cleanPath(movieFolder)

        # Mark this movie as finished
        open(movieDoneFn, 'w').close()