Example #1
def makeNanoporeRead(f5_path):
    # load the NanoporeRead from the .fast5 and write it to a local temp file;
    # `job`, `workdir`, and `readstore_dir` come from the enclosing scope
    np = NanoporeRead(fast_five_file=f5_path, twoD=False)  # TODO make twoD a config arg
    if not np.Initialize(job):
        return None
    read_label = np.read_label
    tF = job.fileStore.getLocalTempFile()
    with open(tF, "w") as fH:
        if not np.Write(job, fH, initialize=False):
            return None
    # gzip the read, deliver it to the readstore, and return the ledger line
    fn = LocalFile(workdir=workdir, filename="%s.np.gz" % read_label)
    with open(tF, "rb") as fH, gzip.open(fn.fullpathGetter(), "wb") as gz:
        shutil.copyfileobj(fH, gz)
    try:
        deliverOutput(job, fn, readstore_dir)
    except RuntimeError:
        job.fileStore.logToMaster("[makeNanoporeReadsJobFunction]Read %s failed to upload" % read_label)
        return None
    return (read_label, "%s%s\n" % (readstore_dir, fn.filenameGetter()))
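
For context, a minimal sketch of how the (read_label, ledger_line) tuples returned above might be aggregated by the parent job; the writeLedger name, the results list, and the ledger filename are assumptions, not part of the original pipeline:

    def writeLedger(job, results, workdir):
        # `results` is assumed to hold the return values of makeNanoporeRead
        # over a batch of .fast5 paths; failed reads contribute None
        ledger = LocalFile(workdir=workdir, filename="readstore.ledger")
        with open(ledger.fullpathGetter(), "w") as fH:
            for entry in results:
                if entry is None:
                    continue
                read_label, ledger_line = entry
                fH.write(ledger_line)  # each line already ends with "\n"
        return job.fileStore.writeGlobalFile(ledger.fullpathGetter())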
Example #2
def _SignalMachine(read_label, cigar, nanopore_read):
    # write the guide alignment (cigar) to a local file for signalMachine;
    # `workdir`, `DOCKER_DIR`, the model/sequence files, and `job` come from
    # the enclosing scope
    guide_aln = LocalFile(workdir=workdir)
    with open(guide_aln.fullpathGetter(), "w") as _handle:
        _handle.write(cigar)
    require(os.path.exists(guide_aln.fullpathGetter()),
            "[_SignalMachine]guide alignment file was not written")
    # all file arguments are passed as container-side paths, hence the
    # DOCKER_DIR prefix
    signalMachine_args = [
        "--sm3Hdp",
        "-s", "1",
        "-o", "%s" % degenerate_enum,
        "-L", "%s" % read_label,
        "-T", "%s%s" % (DOCKER_DIR, models.localFileName(hmmfid)),
        "-q", "%s%s" % (DOCKER_DIR, nanopore_read.filenameGetter()),
        "-f", "%s%s" % (DOCKER_DIR, fw_seqfile.filenameGetter()),
        "-b", "%s%s" % (DOCKER_DIR, bw_seqfile.filenameGetter()),
        "-p", "%s%s" % (DOCKER_DIR, guide_aln.filenameGetter()),
        "-u", "%s%s" % (DOCKER_DIR, posteriors.filenameGetter()),
        "-v", "%s%s" % (DOCKER_DIR, models.localFileName(hdpfid)),
    ]
    try:
        docker_call(job=job,
                    tool=signalMachine_image,
                    parameters=signalMachine_args,
                    work_dir=(workdir + "/"))
    except subprocess.CalledProcessError:
        # tolerate a failed alignment for a single read rather than failing
        # the whole job
        pass
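
The DOCKER_DIR prefixes above rely on docker_call mounting workdir into the container, so each host file is addressed by its container-side path. A minimal sketch of that convention; the /data/ value is an assumption (consistent with the samtools call in getFastqFromBam below), not taken from the original:

    DOCKER_DIR = "/data/"  # hypothetical mount point inside the container

    def containerPath(local_file):
        # map a LocalFile living in workdir to its path inside the container
        return "%s%s" % (DOCKER_DIR, local_file.filenameGetter())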
def getFastqFromBam(job,
                    bam_sample,
                    samtools_image="quay.io/ucsc_cgl/samtools"):
    # n.b. this is NOT a jobFunctionWrappingJob; it just takes the parent job as
    # an argument to have access to the job store
    # download the BAM to the local directory, using a uid to avoid conflicts
    uid = uuid.uuid4().hex
    work_dir = job.fileStore.getLocalTempDir()
    local_bam = LocalFile(workdir=work_dir, filename="bam_{}.bam".format(uid))
    fastq_reads = LocalFile(workdir=work_dir,
                            filename="fastq_reads{}.fq".format(uid))

    urlDownload(parent_job=job,
                source_url=bam_sample.URL,
                destination_file=local_bam)

    require(not os.path.exists(fastq_reads.fullpathGetter()),
            "[getFastqFromBam]fastq file already exists")

    # run samtools to get the reads from the BAM
    # TODO: use DOCKER_DIR and clean this up. Idea: make a globals.py or something
    samtools_parameters = [
        "fastq", "/data/{}".format(local_bam.filenameGetter())
    ]
    with open(fastq_reads.fullpathGetter(), 'w') as fH:
        docker_call(job=job,
                    tool=samtools_image,
                    parameters=samtools_parameters,
                    work_dir=work_dir,
                    outfile=fH)

    require(os.path.exists(fastq_reads.fullpathGetter()),
            "[getFastqFromBam]didn't generate reads")

    # upload fastq to fileStore
    return job.fileStore.writeGlobalFile(fastq_reads.fullpathGetter())
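
As a usage sketch, getFastqFromBam would typically be called from inside a Toil job function, with the returned FileStoreID handed to downstream jobs; extractReadsJobFunction and its wiring here are hypothetical:

    def extractReadsJobFunction(job, bam_sample):
        # returns the FileStoreID of the fastq extracted from the BAM
        fastq_fid = getFastqFromBam(job, bam_sample)
        job.fileStore.logToMaster("extracted fastq for %s" % bam_sample.URL)
        # downstream jobs can materialize the reads with
        # job.fileStore.readGlobalFile(fastq_fid)
        return fastq_fid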