Example #1
 def reportError(self, test, err):
     sys.stderr.write(
         "%s %s\n" %
         (pwutils.redStr('[   FAILED ]'), self.getTestName(test)))
     sys.stderr.write("\n%s" %
                      pwutils.redStr("".join(format_exception(*err))))
     self.testFailed += 1
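
Note: all of the snippets in this listing rely on the pwutils colour helpers (redStr, greenStr, yellowStr). As a rough, hypothetical sketch (assuming they are simple ANSI escape-code wrappers; the actual pyworkflow implementation may differ), they behave roughly like this:

# Hypothetical stand-ins for pwutils.redStr / greenStr / yellowStr:
# wrap a message in an ANSI colour code and reset the colour afterwards.
RED, GREEN, YELLOW, RESET = '\033[31m', '\033[32m', '\033[33m', '\033[0m'

def redStr(msg):
    return "%s%s%s" % (RED, msg, RESET)

def greenStr(msg):
    return "%s%s%s" % (GREEN, msg, RESET)

def yellowStr(msg):
    return "%s%s%s" % (YELLOW, msg, RESET)

# Example: print a red "[   FAILED ]" tag followed by a test name
print("%s %s" % (redStr('[   FAILED ]'), 'TestSomething'))
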
Example #2
    def computePdbModesStep(self, numberOfModes, RTBblockSize, cutoffStr):
        rc = self._getRc(self._getExtraPath('atoms_distance.hist'))
                
        self._enterWorkingDir()
        # For atoms, the interaction force constant was set to 10 as the ElNemo RTB code may ask for its value
        # (the RTBForceConstant entry was removed from the GUI as the value does not change the ENM computed normal modes).
        self.runJob('nma_record_info_PDB.py', "%d %d atoms.pdb %f %f"
                    % (numberOfModes, RTBblockSize, rc, 10.0),
                    env=getNMAEnviron())
        self.runJob("nma_elnemo_pdbmat","",env=getNMAEnviron())
        self.runJob("nma_diagrtb","",env=getNMAEnviron())

        if not exists("diagrtb.eigenfacs"):
            msg = "Modes cannot be computed. Check the number of modes you " \
                  "asked to compute and/or consider "
            msg += "increasing cut-off distance. The maximum number of " \
                   "modes allowed by the method for atomic "
            msg += "normal mode analysis is 6 times the number of RTB blocks " \
                   "but the protocol allows only up "
            msg += "to 200 modes as 20-100 modes are usually enough. If the " \
                   "number of modes is below the minimum "
            msg += "between 200 and 6 times the number of RTB blocks, consider " \
                   "increasing cut-off distance."
            self._printWarnings(redStr(msg) + '\n')
        self.runJob("rm","-f *.dat_run diagrtb.dat pdbmat.xyzm pdbmat.sdijf "
                         "pdbmat.dat")
        
        self._leaveWorkingDir()
Example #3
def _submit(hostConfig, submitDict, cwd=None, env=None):
    """ Submit a protocol to a queue system. Return its job id.
    """
    # Create first the submission script to be launched
    # formatting using the template
    template = hostConfig.getSubmitTemplate() % submitDict
    scripPath = submitDict['JOB_SCRIPT']
    # Ensure the path exists before writing the submission script
    makeFilePath(scripPath)
    f = open(scripPath, 'w')
    # Add some line ends because in some clusters it fails
    # to submit jobs if the submit script does not have end of line
    f.write(template + '\n\n')
    f.close()
    # This should format the command using a template like:
    # "qsub %(JOB_SCRIPT)s"
    command = hostConfig.getSubmitCommand() % submitDict
    gcmd = greenStr(command)
    print("** Submitting to queue: '%s'" % gcmd)

    p = Popen(command, shell=True, stdout=PIPE, cwd=cwd, env=env)
    out = p.communicate()[0]
    # Try to parse the result of qsub, searching for a number (jobId)
    # Review this, it seems to be exclusive to the Torque batch system
    s = re.search(r'(\d+)', str(out))
    if p.returncode == 0 and s:
        job = int(s.group(0))
        print("Launched job with id %s" % job)
        return job
    else:
        print("Couldn't submit to queue for reason: %s " %
              redStr(out.decode()))
        return UNKNOWN_JOBID
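
The submission mechanics above reduce to Python %-formatting with a dictionary: both the script template returned by hostConfig.getSubmitTemplate() and the command returned by hostConfig.getSubmitCommand() contain %(KEY)s placeholders that are filled from submitDict. A minimal standalone sketch, with made-up template, command and keys (real host configurations define their own):

# Hypothetical template and command resembling what a hostConfig could return
submitTemplate = "#!/bin/bash\n#PBS -N %(JOB_NAME)s\n%(JOB_COMMAND)s\n"
submitCommand = "qsub %(JOB_SCRIPT)s"

submitDict = {
    'JOB_NAME': 'demo',
    'JOB_COMMAND': 'echo hello',
    'JOB_SCRIPT': '/tmp/demo_submit.sh',
}

script = submitTemplate % submitDict    # expands into the submission script body
command = submitCommand % submitDict    # e.g. "qsub /tmp/demo_submit.sh"
print(command)
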
Example #4
def _submit(hostConfig, submitDict):
    """ Submit a protocol to a queue system. Return its job id.
    """
    # Create first the submission script to be launched
    # formatting using the template
    template = hostConfig.getSubmitTemplate() % submitDict
    scripPath = submitDict['JOB_SCRIPT']
    # Ensure the path exists before writing the submission script
    makeFilePath(scripPath)
    f = open(scripPath, 'w')
    # Add some line ends because in some clusters it fails
    # to submit jobs if the submit script does not have end of line
    f.write(template+'\n\n')
    f.close()
    # This should format the command using a template like: 
    # "qsub %(JOB_SCRIPT)s"
    command = hostConfig.getSubmitCommand() % submitDict
    gcmd = greenStr(command)
    print "** Submiting to queue: '%s'" % gcmd
    p = Popen(command, shell=True, stdout=PIPE)
    out = p.communicate()[0]
    # Try to parse the result of qsub, searching for a number (jobId)
    s = re.search('(\d+)', out)
    if s:
        return int(s.group(0))
    else:
        print "** Couldn't parse %s ouput: %s" % (gcmd, redStr(out)) 
        return UNKNOWN_JOBID
Example #5
    def getPackageState(cls, packageName, version):
        """
        Check if a package needs to be updated or not
        args: packageName: the package name
              version: version of the installed package
        return: (True, version) if the package needs to be updated, otherwise
                (False, version)

        """
        # Ignore autocheck of outdated package that happens at import time
        os.environ["OUTDATED_IGNORE"] = "1"
        from outdated import check_outdated
        from requests.exceptions import ConnectionError
        try:
            checkOutdated = check_outdated(packageName, version)
        except ConnectionError as connError:
            print("Cannot check update status of %s (%s)" %
                  (packageName, version))
            return False, version
        except ValueError:
            # We intentionally skip this error.
            # When working in devel mode with an increased version not yet released,
            # this ValueError happens, e.g.: Version 3.0.2 is greater than the latest
            # version on PyPI: 3.0.1
            return False, version
        except Exception as ex:
            print(redStr('%s :%s' % (packageName, ex)))
            return False, version

        return checkOutdated
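
For reference, check_outdated from the outdated package returns an (is_outdated, latest_version) tuple and may raise ValueError when the local version is newer than the one on PyPI (as the comment above notes). A small usage sketch (package name and version are illustrative):

import os

# Suppress the automatic check that the outdated package performs at import
# time, mirroring the method above.
os.environ["OUTDATED_IGNORE"] = "1"
from outdated import check_outdated

# True when PyPI has a newer release than the version string passed in.
is_outdated, latest = check_outdated("scipion-pyworkflow", "3.0.1")
print("outdated: %s, latest on PyPI: %s" % (is_outdated, latest))
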
Example #6
 def createOutputStep(self):
     """ This function is shared by Xmipp and CTFfind
     estimation, or recalculate, protocols.
     if is recalculate, it will iterated for each CTF model, see
     if was recalculated and update with new defocus values.
     Else, the function that should be implemented in each subclass.
     """
     if self.recalculate:
         ctfSet = self._createSetOfCTF("_recalculated")
         prot = self.continueRun.get() or self
         if hasattr(prot, ProtCTFMicOutputs.outputCTF.name):
             micSet = prot.outputCTF.getMicrographs()
             # We suppose this is reading the ctf selection
             # (with enabled/disabled) to only consider the enabled ones
             # in the final SetOfCTF
             # TODO: maybe we can remove the need of the extra text file
             # with the recalculate parameters
             newCount = 0
             for ctfModel in self.recalculateSet:
                 if ctfModel.isEnabled() and ctfModel.getObjComment():
                     mic = ctfModel.getMicrograph()
                     # Update the CTF models that were recalculated and append
                     # them later to the set; we don't want to copy the id here
                     # since it is already correct
                     newCtf = self._createCtfModel(mic,
                                                   updateSampling=False)
                     ctfModel.copy(newCtf, copyId=False)
                     ctfModel.setEnabled(True)
                     newCount += 1
                 ctfSet.append(ctfModel)
             ctfSet.setMicrographs(micSet)
             self._defineOutputs(
                 **{ProtCTFMicOutputs.outputCTF.name: ctfSet})
             self._defineCtfRelation(micSet, ctfSet)
             self._computeDefocusRange(ctfSet)
             self.summaryVar.set("CTF Re-estimation of %d micrographs" %
                                 newCount)
         else:
             raise Exception(
                 pwutils.redStr(
                     "The outputCTF do not exist, all CTFs failed."))
     else:
         self._createOutputStep()
         if self.outputCTF.getSize() == 0:
             raise Exception(
                 pwutils.redStr("outputCTF has size zero, all CTFs failed."
                                "Please review processing steps above."))
Example #7
 def createOutputStep(self):
     # validate that we have some output movies
     failedList = self._readFailedList()
     if len(failedList) == len(self.listOfMovies):
         raise Exception(redStr("All movies failed, didn't create outputMicrographs."
                                "Please review movie processing steps above."))
     elif 0 < len(failedList) < len(self.listOfMovies):
         self.warning(yellowStr("WARNING - Failed to align %d movies." % len(failedList)))
Example #8
 def doReport(self):
     secs = time.time() - self.startTimeAll
     sys.stderr.write(
         "\n%s run %d tests (%0.3f secs)\n" %
         (pwutils.greenStr("[==========]"), self.numberTests, secs))
     if self.testFailed:
         sys.stderr.write("%s %d tests\n" %
                          (pwutils.redStr("[  FAILED  ]"), self.testFailed))
     sys.stdout.write("%s %d tests\n" %
                      (pwutils.greenStr("[  PASSED  ]"),
                       self.numberTests - self.testFailed))
Example #9
 def getEmanPlugin(self):
     # --- Eman2 dependencies ---
     try:
         emanPlugin = Domain.importFromPlugin("eman2", "Plugin",
                                              doRaise=True)
         emanPlugin._defineVariables()
     except Exception as e:
         print(pwutils.redStr("Eman plugin does not installed....You need to install it "
               "first."))
         return None
     return emanPlugin
Example #10
def _launchRemote(protocol, wait):
    p = _runRemote(protocol, 'run')
    jobId = UNKNOWN_JOBID    
    out, err = p.communicate()
    if err:
        raise Exception(err)
    s = re.search(r'Scipion remote jobid: (\d+)', out)
    if s:
        jobId = int(s.group(1))
    else:
        raise Exception("** Couldn't parse ouput: %s" % redStr(out))
             
    return jobId    
Example #11
 def computeModesStep(self, fnPseudoatoms, numberOfModes, cutoffStr):
     (baseDir,fnBase)=os.path.split(fnPseudoatoms)
     fnBase=fnBase.replace(".pdb","")
     fnDistanceHist=os.path.join(baseDir,'extra',fnBase+'_distance.hist')
     rc = self._getRc(fnDistanceHist)
     self._enterWorkingDir()
     self.runJob('nma_record_info.py', "%d %s.pdb %d" % (numberOfModes, fnBase, rc),env=getNMAEnviron())
     self.runJob("nma_pdbmat.pl","pdbmat.dat",env=getNMAEnviron())
     self.runJob("nma_diag_arpack","",env=getNMAEnviron())
     if not exists("fort.11"):
         self._printWarnings(redStr("Modes cannot be computed. Check the number of modes you asked to compute and/or consider increasing cut-off distance. The maximum number of modes allowed by the method for pseudoatomic normal mode analysis is 3 times the number of pseudoatoms but the protocol allows only up to 200 modes as 20-100 modes are usually enough.  If the number of modes is below the minimum between 200 and 3 times the number of pseudoatoms, consider increasing cut-off distance."))
     cleanPath("diag_arpack.in", "pdbmat.dat")
     self._leaveWorkingDir()
Example #12
def _frombase(inFileName, outFileName, log, oParam=1):
    # check if maxit exists,
    # if it does not then complain
    # convert pdb to cif using maxit
    global maxitAvailable
    try:
        maxitAvailable
    except NameError:
        if not os.path.exists(Plugin.getMaxitBin()):
            maxitAvailable = False
            # show error message
        else:
            maxitAvailable = True

    if maxitAvailable:
        args = ' -input "' + inFileName + '" -output "' + outFileName + \
               '" -o %d' % oParam
        log.info('Launching: ' + Plugin.getMaxitBin() + args)
        # run in the background
        env = getEnviron()
        pwutils.runJob(None, Plugin.getMaxitBin(), args, env=env)
    else:
        # this is not the ideal conversion but it is better
        # than nothing
        aSH = AtomicStructHandler()
        aSH.read(inFileName)
        aSH.write(outFileName)
        # show error message
        print(
            pwutils.redStr(
                "Please, install maxit with the command 'scipion installb maxit'"
            ))
        print(
            pwutils.redStr(
                "and restart scipion. Packages bison and flex are needed."))
        print(
            pwutils.redStr("If maxit is installed check %s in scipion.conf" %
                           MAXIT_HOME))
Example #13
 def getBinVersions(self):
     """Get list with names of binaries of this plugin"""
     env = Environment()
     env.setDefault(False)
     defaultTargets = [target.getName() for target in env.getTargetList()]
     plugin = self.getPluginClass()
     if plugin is not None:
         try:
             plugin.defineBinaries(env)
         except Exception as e:
             print(
                 redStr("Error retrieving plugin %s binaries: " %
                        self.name), e)
     binVersions = [
         target.getName() for target in env.getTargetList()
         if target.getName() not in defaultTargets
     ]
     return binVersions
Example #14
    def computePdbModesStep(self, numberOfModes, RTBblockSize, RTBForceConstant, cutoffStr):
        rc = self._getRc(self._getExtraPath('atoms_distance.hist'))
                
        self._enterWorkingDir()
        self.runJob('nma_record_info_PDB.py', "%d %d atoms.pdb %f %f" % (numberOfModes, RTBblockSize, rc, RTBForceConstant),
                    env=getNMAEnviron())
        self.runJob("nma_elnemo_pdbmat","",env=getNMAEnviron())
        self.runJob("nma_diagrtb","",env=getNMAEnviron())

        if not exists("diagrtb.eigenfacs"):
            msg = "Modes cannot be computed. Check the number of modes you asked to compute and/or consider "
            msg += "increasing cut-off distance. The maximum number of modes allowed by the method for atomic "
            msg += "normal mode analysis is 6 times the number of RTB blocks but the protocol allows only up "
            msg += "to 200 modes as 20-100 modes are usually enough. If the number of modes is below the minimum "
            msg += "between 200 and 6 times the number of RTB blocks, consider increasing cut-off distance."
            self._printWarnings(redStr(msg) + '\n')
        self.runJob("rm","-f *.dat_run diagrtb.dat pdbmat.xyzm pdbmat.sdijf pdbmat.dat")
        
        self._leaveWorkingDir()
Example #16
 def computeModesStep(self, fnPseudoatoms, numberOfModes, cutoffStr):
     (baseDir, fnBase) = os.path.split(fnPseudoatoms)
     fnBase = fnBase.replace(".pdb", "")
     fnDistanceHist = os.path.join(baseDir, 'extra',
                                   fnBase + '_distance.hist')
     rc = self._getRc(fnDistanceHist)
     self._enterWorkingDir()
     self.runJob('nma_record_info.py',
                 "%d %s.pdb %d" % (numberOfModes, fnBase, rc),
                 env=getNMAEnviron())
     self.runJob("nma_pdbmat.pl", "pdbmat.dat", env=getNMAEnviron())
     self.runJob("nma_diag_arpack", "", env=getNMAEnviron())
     if not exists("fort.11"):
         self._printWarnings(
             redStr(
                 "Modes cannot be computed. Check the number of modes you asked to compute and/or consider increasing cut-off distance. The maximum number of modes allowed by the method for pseudoatomic normal mode analysis is 3 times the number of pseudoatoms but the protocol allows only up to 200 modes as 20-100 modes are usually enough.  If the number of modes is below the minimum between 200 and 3 times the number of pseudoatoms, consider increasing cut-off distance."
             ))
     cleanPath("diag_arpack.in", "pdbmat.dat")
     self._leaveWorkingDir()
Example #17
    def getPackagesStatus(cls, printAll=True):
        """
        Check for scipion-app, scipion-pyworkflow and scipion-em updates
        return: a list of modules to be updated
        """
        outdatedPackages = []
        for package in cls.packageNames:

            needToUpdate, version = cls.getPackageState(package[0], package[1])
            if needToUpdate:
                outdatedPackages.append((package[0], version))
                print(
                    redStr(
                        'The package %s is out of date. Your version is %s, '
                        'the latest is %s.' %
                        (package[0], package[1], version)))
            elif printAll:
                print(
                    greenStr('The package %s is up to date.  Your version '
                             'is %s' % (package[0], version)))

        return outdatedPackages
Example #18
def assignAllParams(argsList, template):
    """
    Assign CML params to the template, if missing params after assignment
    return False
    """
    paramsSetted = 0
    template.parseContent()
    if argsList:

        for attr in argsList:
            # skip --params
            if attr.startswith(FLAG_PARAM):
                continue

            aliasAttr, valAttr = attr.split('=')
            try:
                paramsSetted += template.setParamValue(aliasAttr, valAttr)
            except Exception as e:
                print(pwutils.redStr(e))
                sys.exit(os.EX_DATAERR)

    return len(template.params) == paramsSetted
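
For context, the alias=value handling above is plain string splitting over the command-line arguments. A self-contained sketch of the same idea (FLAG_PARAM and the argument list are made up for illustration; the real template object stores the values itself):

FLAG_PARAM = '--params'  # hypothetical value for the flag skipped above

argsList = ['boxSize=64', 'numberOfMpi=4', '--params']
params = {}
for attr in argsList:
    if attr.startswith(FLAG_PARAM):
        continue  # skip the --params flag itself
    aliasAttr, valAttr = attr.split('=')
    params[aliasAttr] = valAttr

print(params)  # {'boxSize': '64', 'numberOfMpi': '4'}
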
Example #19
def _submit(hostConfig, submitDict):
    """ Submit a protocol to a queue system. Return its job id.
    """
    # Create first the submission script to be launched
    # formatting using the template
    template = hostConfig.getSubmitTemplate() % submitDict
    scripPath = submitDict['JOB_SCRIPT']
    # Ensure the path exists before writing the submission script
    makeFilePath(scripPath)
    f = open(scripPath, 'w')
    # Add some line ends because in some clusters it fails
    # to submit jobs if the submit script does not have end of line
    f.write(template + '\n\n')
    f.close()
    # This should format the command using a template like:
    # "qsub %(JOB_SCRIPT)s"
    command = hostConfig.getSubmitCommand() % submitDict
    gcmd = greenStr(command)
    print "** Submiting to queue: '%s'" % gcmd

    # zf = open('/home/jtq89441/Desktop/scipion.log','w+')
    # zf.write('It works!%s'%submitDict)
    # zf.close()
    #  ----------------------------------
    DLS_SCIPION = '/dls_sw/apps/scipion/release-1.2.1-zo'

    # command_for_recipe = 'module load %s &&'%DLS_SCIPION +'; '+ command

    projpath = submitDict['JOB_COMMAND'].split()[4]

    command_for_queue = '%s %s' % (command.split()[0], '/'.join(
        [projpath, command.split()[1]]))

    print 'command_for_queue: %s' % command_for_queue

    zocolo_cmd = 'module load dials; dials.python /dls_sw/apps/scipion/scipion_1_2_1_dials/scipion/pyworkflow/protocol/generic_template.py %s' % command_for_queue

    print zocolo_cmd

    print '****Before Zocolo****'
    msg_p = Popen(zocolo_cmd, shell=True)
    print '****After Zocolo****'

    #  ------------------------------------

    #Generating the recipe for ActiveMQ
    # default_configuration = '/dls_sw/apps/zocalo/secrets/credentials-live.cfg'
    # # override default stomp host
    # try:
    #     StompTransport.load_configuration_file(default_configuration)
    # except workflows.Error as e:
    #     print "Error: %s\n" % str(e)
    #
    # # StompTransport.add_command_line_options(parser)
    # # (options, args) = parser.parse_args(sys.argv[1:])
    # stomp = StompTransport()
    #
    # message = {'recipes': [],
    #            'parameters': {},
    #            }
    # # Build a custom recipe
    # command_for_recipe = 'module load scipion/release-1.2.1-headless &&' + command
    #
    # recipe = {}
    # recipe['1'] = {}
    # recipe['1']['service'] = "motioncor2_runner"
    # recipe['1']['queue'] = "motioncor2_runner"
    # recipe['1']['parameters'] = {}
    # recipe['1']['parameters']['arguments'] = command_for_recipe
    # recipe['start'] = [[1, []]]
    #
    # message['custom_recipe'] = recipe
    # print "******************************** THIS IS THE SUBMITTED RECIPE**********************************************"
    #
    # stomp.connect()
    # test_valid_recipe = workflows.recipe.Recipe(recipe)
    # test_valid_recipe.validate()
    # print message
    #
    # stomp.send('processing_recipe',message)
    # print("\nMotioncor2 job submitted")
    ## end of recipe generation

    # Non-Zocalo Scipion send command

    p = Popen(command, shell=True, stdout=PIPE)
    out = p.communicate()[0]
    # Try to parse the result of qsub, searching for a number (jobId)
    s = re.search('(\d+)', out)
    if s:
        return int(s.group(0))
    else:
        print "** Couldn't parse %s ouput: %s" % (gcmd, redStr(out))
        return UNKNOWN_JOBID
Example #20
 def qualifyModesStep(self, numberOfModes, collectivityThreshold, structureEM, suffix=''):
     self._enterWorkingDir()
     
     fnVec = glob("modes/vec.*")
     
     if len(fnVec) < numberOfModes:
         msg = "There are only %d modes instead of %d. "
         msg += "Check the number of modes you asked to compute and/or consider increasing cut-off distance."
         msg += "The maximum number of modes allowed by the method for atomic normal mode analysis is 6 times"
         msg += "the number of RTB blocks and for pseudoatomic normal mode analysis 3 times the number of pseudoatoms. "
         msg += "However, the protocol allows only up to 200 modes as 20-100 modes are usually enough. If the number of"
         msg += "modes is below the minimum between these two numbers, consider increasing cut-off distance." 
         self._printWarnings(redStr(msg % (len(fnVec), numberOfModes)))
         print redStr('Warning: There are only %d modes instead of %d.'% (len(fnVec), numberOfModes))
         print redStr("Check the number of modes you asked to compute and/or consider increasing cut-off distance.")
         print redStr("The maximum number of modes allowed by the method for atomic normal mode analysis is 6 times")
         print redStr("the number of RTB blocks and for pseudoatomic normal mode analysis 3 times the number of pseudoatoms.")
         print redStr("However, the protocol allows only up to 200 modes as 20-100 modes are usually enough. If the number of")
         print redStr("modes is below the minimum between these two numbers, consider increasing cut-off distance.")
        
     fnDiag = "diagrtb.eigenfacs"
     
     if structureEM:
         self.runJob("nma_reformatForElNemo.sh", "%d" % len(fnVec),env=getNMAEnviron())
         fnDiag = "diag_arpack.eigenfacs"
         
     self.runJob("echo", "%s | nma_check_modes" % fnDiag,env=getNMAEnviron())
     cleanPath(fnDiag)
     
     fh = open("Chkmod.res")
     mdOut = xmipp.MetaData()
     collectivityList = []
     
     for n in range(len(fnVec)):
         line = fh.readline()
         collectivity = float(line.split()[1])
         collectivityList.append(collectivity)
 
         objId = mdOut.addObject()
         modefile = self._getPath("modes", "vec.%d" % (n+1))
         mdOut.setValue(xmipp.MDL_NMA_MODEFILE, modefile, objId)
         mdOut.setValue(xmipp.MDL_ORDER, long(n+1), objId)
         
         if n >= 6:
             mdOut.setValue(xmipp.MDL_ENABLED, 1, objId)
         else:
             mdOut.setValue(xmipp.MDL_ENABLED, -1, objId)
         mdOut.setValue(xmipp.MDL_NMA_COLLECTIVITY, collectivity, objId)
         
         if collectivity < collectivityThreshold:
             mdOut.setValue(xmipp.MDL_ENABLED,-1,objId)
     fh.close()
     idxSorted = [i[0] for i in sorted(enumerate(collectivityList), key=lambda x:x[1])]
     
     score = []
     for j in range(len(fnVec)):
         score.append(0)
     
     modeNum = []
     l = 0
     for k in range(len(fnVec)):
         modeNum.append(k)
         l += 1
     
     #score = [0]*numberOfModes
     for i in range(len(fnVec)):
         score[i] += i+1
         score[idxSorted[i]] += modeNum[i] - i
     i = 0
     for objId in mdOut:
         score_i = float(score[i])/(2.0*l)
         mdOut.setValue(xmipp.MDL_NMA_SCORE, score_i, objId)
         i+=1
     mdOut.write("modes%s.xmd"%suffix)
     cleanPath("Chkmod.res")
     
     self._leaveWorkingDir()
Example #21
def installPluginMethods():
    """ Deals with plugin installation methods"""

    # Trigger plugin's variable definition
    Config.getDomain().getPlugins()

    invokeCmd = SCIPION_CMD + " " + sys.argv[1]
    pluginRepo = PluginRepository()

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter)
    subparsers = parser.add_subparsers(
        help='mode "installp", "uninstallp" or "listb"',
        dest='mode',
        title='Mode',
        description='available modes are "installp" or "uninstallp"')

    ############################################################################
    #                               Install parser                             #
    ############################################################################

    installParser = subparsers.add_parser(
        "installp",
        formatter_class=argparse.RawTextHelpFormatter,
        usage="%s  [-h] [--noBin] [-p pluginName [pipVersion ...]]" %
        invokeCmd,
        epilog="Example: %s -p scipion-em-motioncorr 1.0.6 "
        "-p scipion-em-relion -p scipion-em-eman2 \n\n" % invokeCmd,
        add_help=False)
    installParser.add_argument('-h',
                               '--help',
                               action='store_true',
                               help='show help')
    installParser.add_argument(
        '--noBin',
        action='store_true',
        help='Optional flag to install plugins only as a python module,\n'
        'without installing the plugin binaries. This will affect\n'
        'all plugins specified in the command.')
    installParser.add_argument(
        '--checkUpdates',
        action='store_true',
        help='Optional flag to check which plugins have new releases.\n')
    installParser.add_argument(
        '-p',
        '--plugin',
        action='append',
        nargs='+',
        metavar=('pluginName', 'pluginVersion'),
        help=
        '- pluginName:     the name of the plugin to install from the list\n'
        '                 of available plugins shown below.\n'
        '- pluginVersion: (optional) pip version to install. If not specified,\n'
        '                 will install the latest compatible with current Scipion.'
    )

    installParser.add_argument(
        '--devel',
        action='store_true',
        help=
        'Optional flag to indicate that we will pass install sources instead\n'
        'of pip names. Sources might be local paths or git urls. With local\n'
        'paths, will do pip install -e (editable mode). It is expected to find\n'
        'the plugin name in the basename of the path or in the repo name. \n'
        '(i.e. it needs to match the one specified in setup.py). E.g:\n'
        'scipion installp -p path/to/pluginName --devel \n'
        'scipion installp -p https://github.com/someOrg/pluginName.git --devel'
    )
    installParser.add_argument('-j',
                               default='1',
                               metavar='j',
                               help='Number of CPUs to use for compilation \n')

    ############################################################################
    #                             Uninstall parser                             #
    ############################################################################

    uninstallParser = subparsers.add_parser(
        "uninstallp",
        formatter_class=argparse.RawTextHelpFormatter,
        usage="%s  [-h] [-p pluginName [binVersion ...]]" % invokeCmd,
        epilog="Example: %s -p scipion-em-eman2 scipion-em-motioncorr \n\n" %
        invokeCmd,
        add_help=False)
    uninstallParser.add_argument('-h',
                                 '--help',
                                 action='store_true',
                                 help='show help')
    uninstallParser.add_argument(
        '--noBin',
        action='store_true',
        help='Optional flag to uninstall plugins only as a python module,\n'
        'without uninstalling the plugin binaries. This will affect\n'
        'all plugins specified in the command.')
    uninstallParser.add_argument(
        '-p',
        '--plugin',
        action='append',
        metavar='pluginName',
        help='The name of the plugin to uninstall from the list\n'
        'of available plugins shown below.\n')

    ############################################################################
    #                           Install Bins parser                            #
    ############################################################################

    installBinParser = subparsers.add_parser(
        "installb",
        formatter_class=argparse.RawTextHelpFormatter,
        usage="%s  [-h] binName1 binName2-1.2.3 binName3 ..." % invokeCmd,
        epilog="Example: %s ctffind4 eman-2.3\n\n" % invokeCmd,
        add_help=False)
    # installBinParser.add_argument('pluginName', metavar='pluginName',
    #                              help='The name of the plugin whose bins we want to uninstall.\n')
    installBinParser.add_argument('-h',
                                  '--help',
                                  action='store_true',
                                  help='show help')
    installBinParser.add_argument(
        'binName',
        nargs='*',
        metavar='binName(s)',
        help='The name(s) of the bins we want to install, optionally with \n'
        'version in the form name-version. If no version is specified,\n'
        'will install the latest one.')
    installBinParser.add_argument(
        '-j',
        default='1',
        metavar='j',
        help='Number of CPUs to use for compilation \n')

    ############################################################################
    #                          Uninstall Bins parser                           #
    ############################################################################

    uninstallBinParser = subparsers.add_parser(
        "uninstallb",
        formatter_class=argparse.RawTextHelpFormatter,
        usage="%s [-h] binName1 binName2-1.2.3 binName3 ..." % invokeCmd,
        epilog="Example: %s ctffind4 relion-3.0\n\n " % invokeCmd,
        add_help=False)
    # uninstallBinParser.add_argument('pluginName', metavar='pluginName',
    #                                help='The name of the plugin whose bins we want to uninstall.\n')
    uninstallBinParser.add_argument('-h',
                                    '--help',
                                    action='store_true',
                                    help='show help')
    uninstallBinParser.add_argument(
        'binName',
        nargs='+',
        metavar='binName(s)',
        help='The name(s) of the bins we want to uninstall\n'
        '(optionally with version in the form name-version). \n'
        'If no version is specified, will uninstall the last one.\n')

    modeToParser = {
        MODE_INSTALL_BINS: installBinParser,
        MODE_UNINSTALL_BINS: uninstallBinParser,
        MODE_INSTALL_PLUGIN: installParser,
        MODE_UNINSTALL_PLUGIN: uninstallParser
    }

    parsedArgs = parser.parse_args(sys.argv[1:])
    mode = parsedArgs.mode
    parserUsed = modeToParser[mode]
    exitWithErrors = False

    if parsedArgs.help or (mode in [MODE_INSTALL_BINS, MODE_UNINSTALL_BINS]
                           and len(parsedArgs.binName) == 0):

        if mode not in [MODE_INSTALL_BINS, MODE_UNINSTALL_BINS]:
            parserUsed.epilog += pluginRepo.printPluginInfoStr()
        else:
            env = Environment()
            env.setDefault(False)
            installedPlugins = Config.getDomain().getPlugins()
            for p, pobj in iteritems(installedPlugins):
                try:
                    pobj.Plugin.defineBinaries(env)
                except Exception as e:
                    print(
                        redStr("Error retrieving plugin %s binaries: " %
                               str(p)), e)
            parserUsed.epilog += env.printHelp()
        parserUsed.print_help()
        parserUsed.exit(0)

    elif mode == MODE_INSTALL_PLUGIN:
        if parsedArgs.checkUpdates:
            print(pluginRepo.printPluginInfoStr(withUpdates=True))
            installParser.exit(0)

        if parsedArgs.devel:
            for p in parsedArgs.plugin:
                pluginSrc = p[0]
                pluginName = ""
                if os.path.exists(pluginSrc):
                    pluginName = os.path.basename(
                        os.path.abspath(pluginSrc).rstrip('/'))
                else:  # we assume it is a git url
                    m = re.match('https://github.com/(.*)/(.*).git', pluginSrc)
                    if m:
                        pluginName = m.group(2)
                if not pluginName:
                    print("ERROR: Couldn't find pluginName for source %s" %
                          pluginSrc)
                    exitWithErrors = True
                else:
                    plugin = PluginInfo(pipName=pluginName,
                                        pluginSourceUrl=pluginSrc,
                                        remote=False)
                    numberProcessor = parsedArgs.j
                    installed = plugin.installPipModule()
                    if installed and not parsedArgs.noBin:
                        plugin.installBin({'args': ['-j', numberProcessor]})
        else:
            pluginsToInstall = list(zip(*parsedArgs.plugin))[0]
            pluginDict = pluginRepo.getPlugins(pluginList=pluginsToInstall,
                                               getPipData=True)
            if not pluginDict:
                exitWithErrors = True
            else:
                for cmdTarget in parsedArgs.plugin:
                    pluginName = cmdTarget[0]
                    pluginVersion = "" if len(cmdTarget) == 1 else cmdTarget[1]
                    numberProcessor = parsedArgs.j
                    plugin = pluginDict.get(pluginName, None)
                    if plugin:
                        installed = plugin.installPipModule(
                            version=pluginVersion)
                        if installed and not parsedArgs.noBin:
                            plugin.installBin(
                                {'args': ['-j', numberProcessor]})
                    else:
                        print("WARNING: Plugin %s does not exist." %
                              pluginName)
                        exitWithErrors = True

    elif parsedArgs.mode == MODE_UNINSTALL_PLUGIN:

        for pluginName in parsedArgs.plugin:
            plugin = PluginInfo(pluginName, pluginName, remote=False)
            if plugin.isInstalled():
                if not parsedArgs.noBin:
                    plugin.uninstallBins()
                plugin.uninstallPip()
            else:
                print("WARNING: Plugin %s is not installed." % pluginName)

    elif parsedArgs.mode == MODE_INSTALL_BINS:
        binToInstallList = parsedArgs.binName
        binToPlugin = pluginRepo.getBinToPluginDict()
        for binTarget in binToInstallList:
            pluginTargetName = binToPlugin.get(binTarget, None)
            if pluginTargetName is None:
                print('ERROR: Could not find target %s' % binTarget)
                continue
            pmodule = Config.getDomain().getPlugin(pluginTargetName)
            numberProcessor = parsedArgs.j
            pinfo = PluginInfo(name=pluginTargetName,
                               plugin=pmodule,
                               remote=False)
            pinfo.installBin({'args': [binTarget, '-j', numberProcessor]})

    elif parsedArgs.mode == MODE_UNINSTALL_BINS:

        binToInstallList = parsedArgs.binName
        binToPlugin = pluginRepo.getBinToPluginDict()
        for binTarget in binToInstallList:
            pluginTargetName = binToPlugin.get(binTarget, None)
            if pluginTargetName is None:
                print('ERROR: Could not find target %s' % binTarget)
                continue
            pmodule = Config.getDomain().getPlugin(pluginTargetName)
            pinfo = PluginInfo(name=pluginTargetName,
                               plugin=pmodule,
                               remote=False)
            pinfo.uninstallBins([binTarget])

    if exitWithErrors:
        parserUsed.exit(1)
    else:
        parserUsed.exit(0)
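
Put together with the epilog strings defined above, the resulting command-line usage looks roughly like this (the plugin and binary names are the ones quoted in the epilogs; actual availability depends on the Scipion installation):

scipion installp -p scipion-em-motioncorr 1.0.6 -p scipion-em-relion -p scipion-em-eman2
scipion uninstallp -p scipion-em-eman2 scipion-em-motioncorr
scipion installb ctffind4 eman-2.3 -j 4
scipion uninstallb ctffind4 relion-3.0
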
Example #22
    def _checkNewOutput(self):
        if getattr(self, 'finished', False):
            return

        # Load previously done items (from text file)
        doneList = self._readDoneList()
        # Check for newly done items
        newDone = [m for m in self.listOfMovies
                   if m.getObjId() not in doneList and self._isMovieDone(m)]

        # Update the file with the newly done movies
        # or exit from the function if no new done movies
        self.debug('_checkNewOutput: ')
        self.debug('   listOfMovies: %s, doneList: %s, newDone: %s'
                   % (len(self.listOfMovies), len(doneList), len(newDone)))

        firstTime = len(doneList) == 0
        allDone = len(doneList) + len(newDone)
        # We have finished when there are no more input movies (stream closed)
        # and the number of processed movies is equal to the number of inputs
        self.finished = self.streamClosed and allDone == len(self.listOfMovies)
        streamMode = pwobj.Set.STREAM_CLOSED if self.finished else pwobj.Set.STREAM_OPEN

        if newDone:
            self._writeDoneList(newDone)

        elif not self.finished:
            # If we are not finished and no new output has been produced
            # it does not make sense to proceed and update the outputs,
            # so we exit from the function here
            return

        self.debug('   finished: %s ' % self.finished)
        self.debug('        self.streamClosed (%s) AND' % self.streamClosed)
        self.debug('        allDone (%s) == len(self.listOfMovies (%s)'
                   % (allDone, len(self.listOfMovies)))
        self.debug('   streamMode: %s' % streamMode)

        if self._createOutputMovies():
            saveMovie = self.getAttributeValue('doSaveMovie', False)
            movieSet = self._loadOutputSet(emobj.SetOfMovies, 'movies.sqlite',
                                           fixSampling=saveMovie)

            # If need to save the movie
            if saveMovie:
                movieSet.setGain(None)
                movieSet.setDark(None)

            for movie in newDone:
                try:
                    newMovie = self._createOutputMovie(movie)
                    if newMovie.getAlignment().getShifts()[0]:
                        movieSet.append(newMovie)
                    else:
                        print(pwutils.yellowStr("WARNING: Movie %s has empty alignment "
                                                "data, can't add it to output set."
                                                % movie.getFileName()))

                # Warn about any exception creating the movie
                except Exception as e:
                    print(pwutils.redStr("ERROR: Movie %s couldn't be "
                                         "added to the output set.\n%s"
                                         % (movie.getFileName(), e)))

            self._updateOutputSet('outputMovies', movieSet, streamMode)

            if firstTime:
                # It is probably a good idea to store a cached summary for the
                # first resulting movie of the processing.
                self._storeSummary(newDone[0])
                # If the movies are not written out, then dimensions can be
                # copied from the input movies
                if not saveMovie:
                    movieSet.setDim(self.inputMovies.get().getDim())
                self._defineTransformRelation(self.inputMovies, movieSet)

        def _updateOutputMicSet(sqliteFn, getOutputMicName, outputName):
            """ Updated the output micrographs set with new items found. """
            micSet = self._loadOutputSet(emobj.SetOfMicrographs, sqliteFn)
            doneFailed = []

            for movie in newDone:
                mic = micSet.ITEM_TYPE()
                mic.copyObjId(movie)
                mic.setMicName(movie.getMicName())
                # The subclass protocol is responsible for generating the output
                # micrograph file in the extra path with the required name
                extraMicFn = self._getExtraPath(getOutputMicName(movie))
                mic.setFileName(extraMicFn)
                if not os.path.exists(extraMicFn):
                    print(pwutils.yellowStr("WARNING: Micrograph %s was not generated, "
                                            "can't add it to output set." % extraMicFn))
                    doneFailed.append(movie)
                    continue
                self._preprocessOutputMicrograph(mic, movie)
                micSet.append(mic)

            self._updateOutputSet(outputName, micSet, streamMode)
            if doneFailed:
                self._writeFailedList(doneFailed)

            if firstTime:
                # We consider that Movies are 'transformed' into Micrographs.
                # This will allow extending the CTF associated with a set of
                # micrographs to another set of micrographs generated from a
                # different movie alignment
                self._defineTransformRelation(self.inputMovies, micSet)

        if self._createOutputMicrographs():
            _updateOutputMicSet('micrographs.sqlite',
                                self._getOutputMicName,
                                'outputMicrographs')

        if self._createOutputWeightedMicrographs():
            _updateOutputMicSet('micrographs_dose-weighted.sqlite',
                                self._getOutputMicWtName,
                                'outputMicrographsDoseWeighted')

        if self.finished:  # Unlock createOutputStep if finished all jobs
            outputStep = self._getFirstJoinStep()
            if outputStep and outputStep.isWaiting():
                outputStep.setStatus(pwcts.STATUS_NEW)
Example #23
    def qualifyModesStep(self,
                         numberOfModes,
                         collectivityThreshold,
                         structureEM,
                         suffix=''):
        self._enterWorkingDir()

        fnVec = glob("modes/vec.*")

        if len(fnVec) < numberOfModes:
            msg = "There are only %d modes instead of %d. "
            msg += "Check the number of modes you asked to compute and/or consider increasing cut-off distance."
            msg += "The maximum number of modes allowed by the method for atomic normal mode analysis is 6 times"
            msg += "the number of RTB blocks and for pseudoatomic normal mode analysis 3 times the number of pseudoatoms. "
            msg += "However, the protocol allows only up to 200 modes as 20-100 modes are usually enough. If the number of"
            msg += "modes is below the minimum between these two numbers, consider increasing cut-off distance."
            self._printWarnings(redStr(msg % (len(fnVec), numberOfModes)))
            print redStr('Warning: There are only %d modes instead of %d.' %
                         (len(fnVec), numberOfModes))
            print redStr(
                "Check the number of modes you asked to compute and/or consider increasing cut-off distance."
            )
            print redStr(
                "The maximum number of modes allowed by the method for atomic normal mode analysis is 6 times"
            )
            print redStr(
                "the number of RTB blocks and for pseudoatomic normal mode analysis 3 times the number of pseudoatoms."
            )
            print redStr(
                "However, the protocol allows only up to 200 modes as 20-100 modes are usually enough. If the number of"
            )
            print redStr(
                "modes is below the minimum between these two numbers, consider increasing cut-off distance."
            )

        fnDiag = "diagrtb.eigenfacs"

        if structureEM:
            self.runJob("nma_reformatForElNemo.sh",
                        "%d" % len(fnVec),
                        env=getNMAEnviron())
            fnDiag = "diag_arpack.eigenfacs"

        self.runJob("echo",
                    "%s | nma_check_modes" % fnDiag,
                    env=getNMAEnviron())
        cleanPath(fnDiag)

        fh = open("Chkmod.res")
        mdOut = xmipp.MetaData()
        collectivityList = []

        for n in range(len(fnVec)):
            line = fh.readline()
            collectivity = float(line.split()[1])
            collectivityList.append(collectivity)

            objId = mdOut.addObject()
            modefile = self._getPath("modes", "vec.%d" % (n + 1))
            mdOut.setValue(xmipp.MDL_NMA_MODEFILE, modefile, objId)
            mdOut.setValue(xmipp.MDL_ORDER, long(n + 1), objId)

            if n >= 6:
                mdOut.setValue(xmipp.MDL_ENABLED, 1, objId)
            else:
                mdOut.setValue(xmipp.MDL_ENABLED, -1, objId)
            mdOut.setValue(xmipp.MDL_NMA_COLLECTIVITY, collectivity, objId)

            if collectivity < collectivityThreshold:
                mdOut.setValue(xmipp.MDL_ENABLED, -1, objId)
        fh.close()
        idxSorted = [
            i[0]
            for i in sorted(enumerate(collectivityList), key=lambda x: x[1])
        ]

        score = []
        for j in range(len(fnVec)):
            score.append(0)

        modeNum = []
        l = 0
        for k in range(len(fnVec)):
            modeNum.append(k)
            l += 1

        #score = [0]*numberOfModes
        for i in range(len(fnVec)):
            score[i] += i + 1
            score[idxSorted[i]] += modeNum[i] - i
        i = 0
        for objId in mdOut:
            score_i = float(score[i]) / (2.0 * l)
            mdOut.setValue(xmipp.MDL_NMA_SCORE, score_i, objId)
            i += 1
        mdOut.write("modes%s.xmd" % suffix)
        cleanPath("Chkmod.res")

        self._leaveWorkingDir()
Example #24
    def _applyOperations(self, operation=None):
        """
        Execute one operation. If operation is None, then execute the operation
        list
        """
        # Save the standard output/error streams and redirect them to the log files
        oldstdout = sys.stdout
        oldstderr = sys.stderr
        sys.stdout = self.fileLog
        sys.stderr = self.fileLogErr
        strErr = None
        defaultModeMessage = 'Executing...'

        message = pwgui.FloatingMessage(self.operationTree,
                                        defaultModeMessage,
                                        xPos=300,
                                        yPos=20)
        message.show()
        for op in self.operationList.getOperations(operation):
            item = op.getObjName()
            try:
                self.operationTree.processing_item(item)
                self.operationTree.update()
                op.runOperation(self.numberProcessors.get())
                self.operationTree.installed_item(item)
                self.operationTree.update()
                self.Textlog.refreshAll(goEnd=True)
                self.Textlog.update()
                if (op.getObjStatus() == PluginStates.INSTALL
                        or op.getObjStatus() == PluginStates.TO_UPDATE):
                    if op.getObjType() == PluginStates.PLUGIN:
                        self.reloadInstalledPlugin(item)
                    else:
                        self.tree.check_item(item)
                else:
                    self.tree.uncheck_item(item)
            except AssertionError as err:
                self.operationTree.failure_item(item)
                if op.getObjType() == PluginStates.BINARY:
                    self.reloadInstalledPlugin(op.getObjParent())
                else:
                    self.reloadInstalledPlugin(item)
                self.operationTree.update()
                strErr = str('Error executing the operation: ' +
                             op.getObjStatus() + ' ' + op.getObjName())
                self.plug_log.info(redStr(strErr), False)
                self.plug_errors_log.error(redStr(strErr), False)
                self.Textlog.refreshAll(goEnd=True)
                self.Textlog.update()
        self.operationList.clearOperations()
        sys.stdout.flush()
        sys.stderr.flush()
        sys.stdout = oldstdout
        sys.stderr = oldstderr
        # Enable the treeview
        self.tree.enable()
        message.close()
        text = 'FINISHED SUCCESSFULLY'
        tag = PluginStates.SUCCESS
        self.operationTree.tag_configure(PluginStates.SUCCESS,
                                         foreground='green')

        if strErr is not None:
            text = 'FINISHED WITH ERRORS'
            tag = PluginStates.ERRORS
            self.operationTree.tag_configure(PluginStates.ERRORS,
                                             foreground='red')

        self.operationTree.insert("",
                                  'end',
                                  text,
                                  text=text,
                                  value=text,
                                  tags=tag)