Code Example #1
 def rollback(self):
     dialogResult = self.showWarningDialog()
     if dialogResult == 'Yes':
         version = str(self.current_item.text())[1:]
         dirPath = os.path.join(amu.getUserCheckoutDir(), os.path.basename(os.path.dirname(self.ORIGINAL_FILE_NAME)))
         print dirPath
         cmd.file(force=True, new=True)
         amu.setVersion(dirPath, int(version))
         self.close()
Code Example #2
 def rollback(self):
     dialogResult = self.show_warning_dialog()
     if dialogResult == 'Yes':
         version = str(self.current_item.text())[1:]
         dirPath = os.path.join(
             amu.getUserCheckoutDir(),
             os.path.basename(os.path.dirname(self.ORIGINAL_FILE_NAME)))
         print dirPath
         cmd.file(force=True, new=True)
         amu.setVersion(dirPath, int(version))
         self.close()
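Both rollback snippets above recover the version number by slicing off the first character of the current item's text before converting it to an int. A minimal sketch of that parsing step, assuming a hypothetical label format such as "v003" (the actual prefix character is not shown in the excerpts):

label = "v003"                    # hypothetical item text; only the slice-and-convert mirrors the code above
version = int(str(label)[1:])     # drops the one-character prefix -> 3
# the snippets then pass this value to amu.setVersion(dirPath, version)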
Code Example #3
import utilities as util
from math import *
from string import Template

import Multithreading as mthread

"""@package GroupedRefinementModule
Package runs refinement phases: RefineA, RefineB, refineNGS, and refineFinal.
Grouped refinement splits refinement into two parts in order to increase CPU
efficiency. The first part aligns molecule maps to contigs, and the second
part does the refinement.

"""


util.setVersion("$Id: GroupedRefinementModule.py 4120 2015-09-17 20:58:31Z wandrews $")

           
#see comment above init
class Refine(mthread.jobWrapper):
    """Replaces the old RefineA and RefineB classes by merging them
    
    Combines RefineA, RefineB, refineNGS, and refineFinal
    """
    #refineStage is a string specifying what stage of refinement you're in
    #must be 'refineA', 'refineB', 'refineNGS', or 'refineFinal' (later)
    def __init__(self, StageName, varsP):
        self.refineStage = StageName
        self.multigroup = True #if False, force single group (not normally good)
        self.varsP = varsP
        ContigPrefix = self.varsP.expID + "_" + StageName
Code Example #4
import os

import mapClasses
#import MapClassesRev
#import RefinementModule as rm
import Multithreading as mthread
import utilities as util

"""
@package CharacterizeModule 
Get general stats and mapping stats (if reference) for contigs

"""


util.setVersion("$Id: CharacterizeModule.py 4051 2015-08-17 19:15:56Z wandrews $")


class dummyCharacterize() :
    """For getting noise parameters in case of bypassing characterize."""
    def __init__(self, varsP) :
        self.curCharacterizeFileRoots = []
        self.varsP = varsP #bc Characterize uses this for totAssemblyLenMb
        #this is problematic for bypass (because mergeIntoSingleCmap isn't called)--don't need it
        #if not len(varsP.curCharacterizeCmaps) : #need this, set in mergeIntoSingleCmap
        #    return
        #ccc = varsP.curCharacterizeCmaps[0]
        #outFileName = os.path.split(ccc)[1].replace(".cmap", "")
        #outfile = os.path.join(varsP.contigAlignTarget,outFileName) #WRONG bc contigAlignTarget is wrong...try this

        outdir = os.path.join(varsP.outputContigFolder, self.varsP.characterizeDirName) #'alignref'
Code Example #5
import pdb
import math
import os

import utilities as util
"""@package MapClassesRev Interprate RefAligner map results for characterization

Read Cmap files and .map file and get contig statistics including mapping stats
"""

util.setVersion("$Id: MapClassesRev.py 3560 2015-02-12 18:40:59Z wandrews $")


def ContigCharacterizationNoRef(varsP, stgstr=""):
    """Report contig stats for cases without reference
    """
    qCmap = MultiCmap(varsP.latestMergedCmap)
    outStr = ''
    for i, cMap in enumerate(qCmap.cmapDB.values()):
        if i == 0:
            outStr += cMap.cmapHeader() + '\n'
        outStr += cMap.reportAlignStatsUnmapped() + '\n'

    k = 1e-6
    varsP.totAssemblyLenMb = k * qCmap.totalLen
    if stgstr:
        outStr += 'Stage Summary: %s\n' % stgstr
    outStr += 'N Genome Maps: %d\n' % qCmap.n
    outStr += 'Total Genome Map Len (Mb):   %6.3f\n' % (k * qCmap.totalLen)
    outStr += 'Avg. Genome Map Len  (Mb):   %6.3f\n' % (k * qCmap.aveLen)
    outStr += 'Genome Map n50       (Mb):   %6.3f\n' % (k * qCmap.n50)
Code Example #6
import os
#import pdb
import sys
from random import choice
import math
from collections import defaultdict
"""@package molecule Author of BNX file; aggregates detection results

"""

import utilities
utilities.setVersion("$Id: molecule.py 2494 2014-02-15 00:18:25Z wandrews $")

tab = '\t'
newline = '\n'


def lambdaAnchors(bpp=500):
    """In silico lambda bspqI reference (hard coded)
    """
    lambdaRef = [
        2396, 6488, 8701, 10369, 13285, 24768, 27233, 34326, 34799, 47711
    ]
    distPattern = []
    for i, val0 in enumerate(lambdaRef):
        for j, val1 in enumerate(lambdaRef):
            if i >= j:
                continue
            distPattern.append(abs(val0 - val1))
    scaleFactor = 1. / bpp
    return [scaleFactor * x for x in distPattern]
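lambdaAnchors above yields one scaled distance for every unordered pair of the 10 hard-coded lambda bspqI sites. A short usage sketch (a hypothetical call site; it assumes only the function as defined above):

anchors = lambdaAnchors(bpp=500)
assert len(anchors) == 45              # 10 sites -> 10*9/2 unordered pairs
largest = max(anchors)                 # span between the outermost sites: (47711 - 2396) / 500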
Code Example #7
import os
import subprocess
import time
import shutil
#import pdb
import threading
from string import Template
"""@package Multithreading Controls queuing, process exectution, and process 
logging for all de novo phases

Schedules jobs on single node based on user defined thread limit
Submits jobs to drmaa scheduler for distributed execution
Checks result for expected outputs and logs computation and performance details
"""

import utilities
utilities.setVersion(
    "$Id: Multithreading.py 4882 2016-05-05 00:38:41Z wandrews $")

try:
    import drmaa
except:
    pass

global my_wait

if os.name == "nt":
    import ctypes
    global nt_handles_list
    global SYNCHRONIZE
    global INFINITE
    SYNCHRONIZE = 0x00100000
    INFINITE = -1
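The excerpt above ends right after defining the Win32 constants SYNCHRONIZE and INFINITE. The sketch below is not part of Multithreading.py; it is only a hedged illustration of how such constants are conventionally used through ctypes on Windows, and the helper name wait_for_pid is an assumption:

import ctypes
import os

SYNCHRONIZE = 0x00100000    # access right required to wait on a process handle
INFINITE = 0xFFFFFFFF       # unsigned form of the -1 above: wait without timeout

def wait_for_pid(pid):
    """Block until the process with the given pid exits (Windows only)."""
    if os.name != "nt":
        raise OSError("this sketch uses the Win32 API and runs only on Windows")
    kernel32 = ctypes.windll.kernel32
    kernel32.OpenProcess.restype = ctypes.c_void_p
    kernel32.WaitForSingleObject.argtypes = [ctypes.c_void_p, ctypes.c_uint32]
    kernel32.CloseHandle.argtypes = [ctypes.c_void_p]
    handle = kernel32.OpenProcess(SYNCHRONIZE, False, pid)
    if not handle:
        raise OSError("OpenProcess failed for pid %d" % pid)
    try:
        kernel32.WaitForSingleObject(handle, INFINITE)  # returns once the process terminates
    finally:
        kernel32.CloseHandle(handle)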
Code Example #8
import shutil
import pdb
import utilities as util
from math import *
from string import Template

import Multithreading as mthread
"""@package GroupedRefinementModule
Package runs refinement phases: RefineA, RefineB, refineNGS, and refineFinal.
Grouped refinement splits refinement into two parts in order to increase CPU
efficiency. The first part aligns molecule maps to contigs, and the second
part does the refinement.

"""

util.setVersion(
    "$Id: GroupedRefinementModule.py 4814 2016-04-19 20:59:55Z wandrews $")


#see comment above init
class Refine(mthread.jobWrapper):
    """Replaces the old RefineA and RefineB classes by merging them
    
    Combines RefineA, RefineB, refineNGS, and refineFinal
    """

    #refineStage is a string specifying what stage of refinement you're in
    #must be 'refineA', 'refineB', 'refineNGS', or 'refineFinal' (later)
    def __init__(self, StageName, varsP):
        self.refineStage = StageName
        self.multigroup = True  #if False, force single group (not normally good)
        self.varsP = varsP
Code Example #9
#import pdb
import os
import threading
from string import Template


"""@package Multithreading Controls queuing, process exectution, and process 
logging for all de novo phases

Schedules jobs on single node based on user defined thread limit
Submits jobs to drmaa scheduler for distributed execution
Checks result for expected outputs and logs computation and performance details
"""


import utilities
utilities.setVersion("$Id: Multithreading.py 4164 2015-09-28 23:40:23Z wandrews $")


try:
    import drmaa
except:
    pass


global my_wait

if os.name == "nt":
    import ctypes
    global nt_handles_list
    global SYNCHRONIZE
    global INFINITE
Code Example #10
import shutil
#from collections import OrderedDict

import utilities as util
import Multithreading as mthread

"""@package SampleCharModule Defines jobs to map bnx to reference and parses
mapping results

Optional pre-processor for De Novo assembly to characterize the single molecule
noise of the input. Reference required. Can help to inform run time noise 
arguments.
"""


util.setVersion("$Id: SampleCharModule.py 4889 2016-05-05 22:25:00Z wandrews $")


class SampleChar(mthread.jobWrapper):
    """Generates jobs for distributed single molecule mapping
    """
    def __init__(self, varsP):
        self.varsP = varsP
        stageName = '  Sample Characterization, Noise Levels'
        super(SampleChar, self).__init__(self.varsP, stageName, clusterArgs=varsP.getClusterArgs('sampleChar'))
        self.generateJobList()
    
    def runJobs(self):
        self.multiThreadRunJobs(self.varsP.nThreads, sleepTime = 0.2)
    
    def generateJobList(self):
Code Example #11
import os
from xml.dom.minidom import parse, parseString
import shutil
#import pdb

"""@package manageTargetLocation Metadata for IRYS intstrument/experiment
    
Reads instrument/experiment xml file and passes relevant information to 
ImageProcessingModule.Device() 
Also finds device tiff images

"""


import utilities
utilities.setVersion("$Id: manageTargetLocation.py 2686 2014-04-15 16:51:46Z vdergachev $")


class repositoryFolder():
    """Metadata container for instrument/experiment information subset. Pass info
    to ImageProcessingModule.Device() 
    
    Also finds device tiff images
    """
    def __init__(self, remoteDirectory, localDirectory, leadTiffTag = '', stretchFactor = 0.85):
        self.remoteDirectory = remoteDirectory
        self.localDirectory = localDirectory
        self.leadTiffTag = leadTiffTag
        self.FinalCycleIsFlush = ''        
        self.nColors = 0
        self.ExpectedOverlap = 0
Code Example #12
import os

import Multithreading as mthread

"""@package PairwiseModule Defines jobs for execution of pairwise comparison

Major operation modes:
Basic distributed pairwise jobs
Distributed pairwise jobs following hash result
Distributed pairwise jobs following distributed hash results
Distributed pairwise jobs using triangle input partitioning
"""

import math
import utilities as util
util.setVersion("$Id: PairwiseModule.py 4169 2015-09-30 19:10:49Z wandrews $")



class Pairwise(mthread.jobWrapper):
    """Populates Multithreading package for distributed pairwise jobs
    """
    def __init__(self, varsP):
        self.varsP = varsP
        stageName = 'Pairwise'
        mthread.jobWrapper.__init__(self, varsP, stageName,clusterArgs=varsP.getClusterArgs('pairwise'))
        self.generateJobList()
    
    def runJobs(self):
        self.multiThreadRunJobs(self.varsP.nThreads, sleepTime = 0.2)
    
Code Example #13
import os
#import pdb
import sys
from random import choice
import math
from collections import defaultdict

"""@package molecule Author of BNX file; aggregates detection results

"""


import utilities
utilities.setVersion("$Id: molecule.py 2494 2014-02-15 00:18:25Z wandrews $")


tab = '\t'
newline = '\n'

def lambdaAnchors(bpp = 500):
    """In silico lambda bspqI reference (hard coded)
    """
    lambdaRef = [2396,6488,8701,10369,13285,24768,27233,34326,34799,47711]
    distPattern = []
    for i, val0 in enumerate(lambdaRef):
        for j, val1 in enumerate(lambdaRef):
            if i >= j:
                continue
            distPattern.append(abs(val0-val1))
    scaleFactor = 1./bpp
    return [scaleFactor*x for x in distPattern]
Code Example #14
import os
#import shutil #for rmtree
import Multithreading as mthread
import utilities as util
import mapClasses as mc
"""
@package AlignModule
Align molecules to final contigs, typically refine Final contigs.
"""

util.setVersion("$Id: AlignModule.py 4307 2015-11-25 17:55:11Z wandrews $")


class AlignModule(mthread.jobWrapper):
    """Class for alignment of molecules to final contigs.
    """
    def __init__(self, varsP, doref=False, outputdir=None, bnxin=None):
        """doref determines parameter set from optargs.
        outputdir not needed for Pipeline, but used in runAlignMol.py.
        If bnxin supplied, will run single job with it.
        """
        self.varsP = varsP
        self.doref = doref
        self.bnxin = bnxin  #see generateJobList

        self.argStageName = 'alignmol'  #use arguments from alignmol (optArgs, not clusterArgs)
        if not doref:
            self.stageName = 'alignmol'  #also name of dir which is sub-dir of varsP.outputContigFolder
            self.alignTarget = os.path.join(varsP.outputContigFolder,
                                            self.stageName)  #output dir
            self.varsP.alignMolDir = self.alignTarget  #store in varsP for subsequent processing
Code Example #15
import os
from xml.dom.minidom import parse, parseString
import shutil
#import pdb
"""@package manageTargetLocation Metadata for IRYS intstrument/experiment
    
Reads instrument/experiment xml file and passes relevant information to 
ImageProcessingModule.Device() 
Also finds device tiff images

"""

import utilities
utilities.setVersion(
    "$Id: manageTargetLocation.py 2686 2014-04-15 16:51:46Z vdergachev $")


class repositoryFolder():
    """Metadata container for instrument/experiment information subset. Pass info
    to ImageProcessingModule.Device() 
    
    Also finds device tiff images
    """
    def __init__(self,
                 remoteDirectory,
                 localDirectory,
                 leadTiffTag='',
                 stretchFactor=0.85):
        self.remoteDirectory = remoteDirectory
        self.localDirectory = localDirectory
        self.leadTiffTag = leadTiffTag
Code Example #16
#import subprocess
#import time
import os
#import pdb

import Multithreading as mthread

"""@package AssemblyModule For defining assembly worker (singleJob)

"""


import utilities
utilities.setVersion("$Id: AssemblyModule.py 3928 2015-07-08 01:17:25Z vdergachev $")


class Assemble(mthread.jobWrapper):
    """ Class to run the assembly phase of de novo assembly
    
    """
    
    def __init__(self, varsP):
        self.varsP = varsP
        stageName = 'Assembly'
        mthread.jobWrapper.__init__(self, varsP, stageName, clusterArgs=varsP.getClusterArgs('assembly'))
        self.contigString = varsP.expID + '_unrefined'
        varsP.prepareContigIO(self.contigString, stageName)
        self.varsP.stageName=stageName
        self.contigsFile = None
        self.generateJobList()
    
Code Example #17
import os

import Multithreading as mthread

"""@package RefinementModule Defines jobs for refinement, extension and merging 
operations


"""


import utilities
utilities.setVersion("$Id: RefinementModule.py 4032 2015-08-12 18:43:34Z wandrews $")


#this class replaces the old RefineA and RefineB classes by merging them
#see comment above init
class Refine(mthread.jobWrapper):
    """refineStage is a string specifying what stage of refinement you're in
    must be 'refineA', 'refineB', 'refineNGS', or 'refineFinal' (later)
    """
    def __init__(self, refineStage, varsP):
        validstages = ['refineA', 'refineB', 'refineNGS', 'refineFinal']
        if refineStage not in validstages:
            varsP.error += 1
            varsP.message += '  Error: Refine stage name invalid: '+str(refineStage)+'\n'
            return
        self.refineStage = refineStage
        self.varsP = varsP
        utilities.LogStatus("progress", "stage_start", self.refineStage)
        #super is more pythonic than referring to the base class explicitly (only matters for multiple inheritance)
Code Example #18
import shutil
#from collections import OrderedDict

import utilities as util
import Multithreading as mthread

"""@package SampleCharModule Defines jobs to map bnx to reference and parses
mapping results

Optional pre-processor for De Novo assembly to characterize the single molecule
noise of the input. Reference required. Can help to inform run time noise 
arguments.
"""


util.setVersion("$Id: SampleCharModule.py 4288 2015-11-19 17:54:37Z wandrews $")


class SampleChar(mthread.jobWrapper):
    """Generates jobs for distributed single molecule mapping
    """
    def __init__(self, varsP):
        self.varsP = varsP
        stageName = '  Sample Characterization, Noise Levels'
        super(SampleChar, self).__init__(self.varsP, stageName, clusterArgs=varsP.getClusterArgs('sampleChar'))
        self.generateJobList()
    
    def runJobs(self):
        self.multiThreadRunJobs(self.varsP.nThreads, sleepTime = 0.2)
    
    def generateJobList(self):
Code Example #19
import pdb
import math
import os

import utilities as util

"""@package MapClassesRev Interprate RefAligner map results for characterization

Read Cmap files and .map file and get contig statistics including mapping stats
"""


util.setVersion("$Id: MapClassesRev.py 3560 2015-02-12 18:40:59Z wandrews $")


def ContigCharacterizationNoRef(varsP, stgstr=""):
    """Report contig stats for cases without reference
    """
    qCmap = MultiCmap(varsP.latestMergedCmap)
    outStr = ''
    for i,cMap in enumerate(qCmap.cmapDB.values()):
        if i==0:
            outStr += cMap.cmapHeader() + '\n'
        outStr += cMap.reportAlignStatsUnmapped() + '\n'

    k = 1e-6
    varsP.totAssemblyLenMb = k * qCmap.totalLen
    if stgstr :
        outStr += 'Stage Summary: %s\n' % stgstr
    outStr += 'N Genome Maps: %d\n' % qCmap.n
    outStr += 'Total Genome Map Len (Mb):   %6.3f\n' % (k * qCmap.totalLen)
Code Example #20
import os
#import pdb

import Multithreading as mthread
import manageTargetLocation as mtl
import molecule
import utilities as util

"""@package ImageProcessingModule Extract BNX data from raw tiff data

Currently not functional for cluster (file transfer considerations)
Read tiff, run DM-static, Rescale for lambda, generate bnx file
"""


util.setVersion("$Id: ImageProcessingModule.py 3566 2015-02-13 20:30:19Z wandrews $")


#quality is to turn on QX lines in bnx
#forceonecolor is for debugging only -- use False
#now return 1 for any errors, return 0 (or None) for success
def performImageAnalysis(varsP, bypass=False, quality=True, forceonecolor=False):
    """Top level function for instrument scaling, image handling, bnx encoding
    
    """
    #print "bypass = "******"ERROR in performImageAnalysis: no images found in paths in "+varsP.imgFile+"\n")
        return 1 #this is an error--new convention is to return 1 on error
Code Example #21
import os

import Multithreading as mthread
"""@package RefinementModule Defines jobs for refinement, extension and merging 
operations


"""

import utilities
utilities.setVersion(
    "$Id: RefinementModule.py 4866 2016-05-02 19:34:02Z wandrews $")


#this class replaces the old RefineA and RefineB classes by merging them
#see comment above init
class Refine(mthread.jobWrapper):
    """refineStage is a string specifying what stage of refinement you're in
    must be 'refineA', 'refineB', 'refineNGS', or 'refineFinal' (later)
    """
    def __init__(self, refineStage, varsP):
        validstages = ['refineA', 'refineB', 'refineNGS', 'refineFinal']
        if refineStage not in validstages:
            varsP.error += 1
            varsP.message += '  Error: Refine stage name invalid: ' + str(
                refineStage) + '\n'
            return
        self.refineStage = refineStage
        self.varsP = varsP
        utilities.LogStatus("progress", "stage_start", self.refineStage)
        #super is more pythonic than referring to the base class explicitly (only matters for multiple inheritance)
Code Example #22
import os
#import shutil #for rmtree
import Multithreading as mthread
import utilities as util
import mapClasses as mc

"""
@package AlignModule
Align molecules to final contigs, typically refine Final contigs.
"""

util.setVersion("$Id: AlignModule.py 4307 2015-11-25 17:55:11Z wandrews $")


class AlignModule(mthread.jobWrapper):
    """Class for alignment of molecules to final contigs.
    """

    def __init__(self, varsP, doref=False, outputdir=None, bnxin=None):
        """doref determines parameter set from optargs.
        outputdir not needed for Pipeline, but used in runAlignMol.py.
        If bnxin supplied, will run single job with it.
        """
        self.varsP = varsP
        self.doref = doref
        self.bnxin = bnxin #see generateJobList

        self.argStageName = 'alignmol' #use arguments from alignmol (optArgs, not clusterArgs)
        if not doref :
            self.stageName = 'alignmol' #also name of dir which is sub-dir of varsP.outputContigFolder
            self.alignTarget = os.path.join(varsP.outputContigFolder, self.stageName) #output dir
Code Example #23
import os

import mapClasses
#import MapClassesRev
#import RefinementModule as rm
import Multithreading as mthread
import utilities as util
"""
@package CharacterizeModule 
Get general stats and mapping stats (if reference) for contigs

"""

util.setVersion(
    "$Id: CharacterizeModule.py 4890 2016-05-05 22:26:07Z wandrews $")


class dummyCharacterize():
    """For getting noise parameters in case of bypassing characterize."""
    def __init__(self, varsP):
        self.curCharacterizeFileRoots = []
        self.varsP = varsP  #bc Characterize uses this for totAssemblyLenMb
        #this is problematic for bypass (because mergeIntoSingleCmap isn't called)--don't need it
        #if not len(varsP.curCharacterizeCmaps) : #need this, set in mergeIntoSingleCmap
        #    return
        #ccc = varsP.curCharacterizeCmaps[0]
        #outFileName = os.path.split(ccc)[1].replace(".cmap", "")
        #outfile = os.path.join(varsP.contigAlignTarget,outFileName) #WRONG bc contigAlignTarget is wrong...try this

        outdir = os.path.join(varsP.outputContigFolder,
                              self.varsP.characterizeDirName)  #'alignref'
Code Example #24
import os

import Multithreading as mthread

"""@package PairwiseModule Defines jobs for execution of pairwise comparison

Major operation modes:
Basic distributed pairwise jobs
Distributed pairwise jobs following hash result
Distributed pairwise jobs following distributed hash results
Distributed pairwise jobs using triangle input partitioning
"""

import math
import utilities as util
util.setVersion("$Id: PairwiseModule.py 4960 2016-05-18 22:07:57Z wandrews $")



class Pairwise(mthread.jobWrapper):
    """Populates Multithreading package for distributed pairwise jobs
    """
    def __init__(self, varsP):
        self.varsP = varsP
        stageName = 'Pairwise'
        mthread.jobWrapper.__init__(self, varsP, stageName,clusterArgs=varsP.getClusterArgs('pairwise'))
        self.generateJobList()
    
    def runJobs(self):
        self.multiThreadRunJobs(self.varsP.nThreads, sleepTime = 0.2)