# Example #1
def addMkDirJob(workflow=None, mkdir=None, outputDir=None, namespace=None, version=None,\
   parentJobLs=None, extraDependentInputLs=None):
    """
    Add a job to the workflow that creates directory *outputDir*.

    Parameters:
        workflow: the Pegasus workflow/DAX object; its ``namespace``/``version``
            attributes take precedence over the explicit arguments.
        mkdir: the registered mkdir executable; ``mkdir.name`` names the job.
        outputDir: directory path, passed as the sole job argument.
        namespace, version: fallbacks used only when *workflow* lacks them.
        parentJobLs: optional jobs this job must run after (None entries skipped).
        extraDependentInputLs: optional files to register as job inputs
            (None entries skipped).

    Returns:
        the newly created Job, with custom attributes ``folder`` and
        ``output`` both set to *outputDir* for downstream convenience.

    History:
        2012.10.2 increment workflow.no_of_jobs
        2012.9.11 tolerate parentJobLs/extraDependentInputLs being None
        2012.3.10 add parentJobLs, extraDependentInputLs
        2011-11-28 get namespace and version from workflow first
        2011-9-14
    """
    # Prefer the workflow-level namespace/version; fall back to the arguments.
    job = Job(namespace=getattr(workflow, 'namespace', namespace), name=mkdir.name, \
       version=getattr(workflow, 'version', version))
    job.addArguments(outputDir)
    job.folder = outputDir  # custom attribute
    job.output = outputDir  # custom attribute
    workflow.addJob(job)
    if parentJobLs:
        for parentJob in parentJobLs:
            if parentJob:
                workflow.depends(parent=parentJob, child=job)
    if extraDependentInputLs:
        # Renamed from `input` to avoid shadowing the builtin.
        for inputFile in extraDependentInputLs:
            if inputFile is not None:
                job.uses(inputFile, transfer=True, register=True, link=Link.INPUT)
    if hasattr(workflow, 'no_of_jobs'):  # 2012.10.2: job counter is optional
        workflow.no_of_jobs += 1
    return job
	def addReadCountJob(self, workflow, VariousReadCountJava=None, GenomeAnalysisTKJar=None,\
					refFastaFList=None, bamF=None, baiF=None, readCountOutputF=None,\
					parentJobLs=None, job_max_memory = 1000, extraArguments="", \
					transferOutput=False):
		"""
		Add a GATK "VariousReadCount" java job to *workflow*.

		Parameters:
			VariousReadCountJava: registered java executable for the job.
			GenomeAnalysisTKJar: the GATK jar file, registered as an input.
			refFastaFList: reference fasta file list; element 0 is passed
				via -R, and the whole list is registered as job inputs.
			bamF, baiF: the BAM and its index, registered as inputs.
			readCountOutputF: output file (-o), also stored on job.output.
			parentJobLs: optional parent jobs (fixed 2012-era mutable
				default [] -> None to avoid cross-call sharing).
			job_max_memory: java heap ceiling in MB (-Xmx).
			extraArguments: extra command-line string appended if non-empty.
			transferOutput: whether the output file is staged out.

		Returns:
			the newly created Job.

		History:
			2011-11-25
		"""
		if parentJobLs is None:  # avoid mutable default argument
			parentJobLs = []
		javaMemRequirement = "-Xms128m -Xmx%sm"%job_max_memory
		refFastaF = refFastaFList[0]
		job = Job(namespace=workflow.namespace, name=VariousReadCountJava.name, version=workflow.version)
		# NOTE(review): "-mmq 30" is passed as a single argument token —
		# presumably the Pegasus Job splits it; confirm against other jobs.
		job.addArguments(javaMemRequirement, '-jar', GenomeAnalysisTKJar, "-T", "VariousReadCount",\
			'-R', refFastaF, '-o', readCountOutputF, "-mmq 30")
		job.addArguments("-I", bamF)
		if extraArguments:
			job.addArguments(extraArguments)
		self.addJobUse(job, file=GenomeAnalysisTKJar, transfer=True, register=True, link=Link.INPUT)
		job.uses(bamF, transfer=True, register=True, link=Link.INPUT)
		job.uses(baiF, transfer=True, register=True, link=Link.INPUT)
		self.registerFilesAsInputToJob(job, refFastaFList)
		job.output = readCountOutputF
		job.uses(readCountOutputF, transfer=transferOutput, register=True, link=Link.OUTPUT)
		workflow.addJob(job)
		yh_pegasus.setJobProperRequirement(job, job_max_memory=job_max_memory)
		for parentJob in parentJobLs:
			workflow.depends(parent=parentJob, child=job)
		# TODO(2013.3.24): migrate this to the generic self.addGATKJob(...)
		# helper (GATKAnalysisType="VariousReadCount") instead of building
		# the java command line by hand here.
		return job