Example #1
    def test_guess_file_format(self):
        """Test guess_file_format."""
        from pbtranscript.Utils import guess_file_format, FILE_FORMATS
        fn1 = op.join(self.sivDataDir, "bigbam", "ccsbam.fofn")
        self.assertEqual(guess_file_format(fn1), FILE_FORMATS.BAM)

        fn2 = op.join(self.sivDataDir, "bigbam", "bas.fofn")
        self.assertEqual(guess_file_format(fn2), FILE_FORMATS.H5)

        self.assertEqual(guess_file_format([fn1, fn2]), FILE_FORMATS.UNKNOWN)
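
The test pins down guess_file_format's contract: a FOFN whose entries are BAM files maps to FILE_FORMATS.BAM, one whose entries are bas/bax.h5 files maps to FILE_FORMATS.H5, and a mixed list of inputs maps to FILE_FORMATS.UNKNOWN. Below is a minimal extension-based sketch of that contract for a list of data files (hypothetical; pbtranscript's real guess_file_format additionally expands FOFNs and dataset XML before classifying):

# Hypothetical sketch of extension-based format detection; it mirrors only
# the homogeneous-vs-mixed behavior asserted in the test above.
def guess_format_by_extension(filenames):
    if isinstance(filenames, str):
        filenames = [filenames]
    seen = set()
    for fn in filenames:
        if fn.endswith(".bam"):
            seen.add("BAM")
        elif fn.endswith((".bas.h5", ".bax.h5")):
            seen.add("H5")
        else:
            seen.add("UNKNOWN")
    # A homogeneous list keeps its format; any mixture is UNKNOWN.
    return seen.pop() if len(seen) == 1 else "UNKNOWN"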
Example #2
def ice_fa2fq(in_fa, ccs_fofn, out_fq):
    """Convert an input FASTA file to an output FASTQ file,
       reading QVs from the input ccs.h5, ccs.bam or ccs FOFN.
    """
    ccs_fns = get_files_from_file_or_fofn(ccs_fofn)
    fmt = guess_file_format(ccs_fns)

    if fmt == FILE_FORMATS.H5:
        qver = basQVcacher()
        for ccs_fn in ccs_fns:
            qver.add_bash5(ccs_fn)
        bas_handlers = {}
    elif fmt == FILE_FORMATS.BAM:
        qver = BamCollection(*ccs_fns)
    else:
        raise IOError("ice_fa2fq does not support input %s." % ccs_fofn)

    with ContigSetReaderWrapper(in_fa) as reader, \
            FastqWriter(out_fq) as writer:
        for r in reader:
            logging.debug("Getting QVs for {name} ...".format(name=r.name))
            seqid = r.name.split(' ')[0]
            parsed_read_name = _Parsed_Read_Name(seqid)
            if fmt == FILE_FORMATS.H5:
                try:
                    bas_file = qver.bas_files[parsed_read_name.movie][seqid]
                    if bas_file not in bas_handlers:
                        bas_handlers[bas_file] = BasH5Reader(bas_file)
                except KeyError:
                    raise IOError("Could not read {s} from {f}.".format(
                        s=seqid, f=ccs_fofn))
                qvs = get_qv_from_bas_handler(
                    bas_handler=bas_handlers[bas_file],
                    parsed_read_name=parsed_read_name,
                    qv_name="QualityValue")
            elif fmt == FILE_FORMATS.BAM:
                qvs = get_qvs_from_bam(reader=qver,
                                       parsed_read_name=parsed_read_name,
                                       qv_name="QualityValue")
            else:
                assert False

            if len(r.sequence) != len(qvs):
                raise ValueError(
                    "Sequence and QV lengths of {r} must be equal!".format(
                        r=r.name))
            writer.writeRecord(r.name, r.sequence[:], qvs)

    if fmt == FILE_FORMATS.H5:
        for bas_file, bas_handler in bas_handlers.iteritems():
            logging.debug("Closing {bas_file} ...".format(bas_file=bas_file))
            bas_handler.close()
    elif fmt == FILE_FORMATS.BAM:
        qver.close()
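
A hedged usage sketch of ice_fa2fq (all file names below are placeholders, not files shipped with the project):

# Hypothetical invocation; requires the pbtranscript imports used above.
ice_fa2fq(in_fa="isoforms.fasta",    # input FASTA records
          ccs_fofn="ccs.fofn",       # FOFN listing ccs.bam or ccs.h5 files
          out_fq="isoforms.fastq")   # output FASTQ with per-base QVs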
Example #3
    def validate_inputs(self):
        """Validate input fofns, and root_dir, log_dir, tmp_dir,
        create quivered_dir and quivered_log_dir"""
        self.add_log("Validating inputs.")

        # Create directories: root_dir/quivered and root_dir/log_dir/quivered
        try:
            mkdir(self.quivered_dir)
            mkdir(self.quivered_log_dir)
        except OSError:
            # Multiple ice_quiver_i jobs may run at the same time and try to
            # mkdir, race condition may happen, so ignore OSError here.
            pass

        errMsg = ""

        if not nfs_exists(self.log_dir) or not op.isdir(self.log_dir):
            errMsg = "Log dir {l} is not an existing directory.".\
                format(l=self.log_dir)
        elif self.bas_fofn is None:
            errMsg = "Please specify subreads file (e.g., --bas_fofn=input.fofn|subreadset.xml)."
        elif not nfs_exists(self.bas_fofn):
            errMsg = "Specified subreads file (bas_fofn={f}) does not exist.".format(
                f=self.bas_fofn)
        elif not nfs_exists(self.nfl_all_pickle_fn):
            #"output/map_noFL/noFL.ALL.partial_uc.pickle"):
            errMsg = "Pickle file {f} ".format(f=self.nfl_all_pickle_fn) + \
                     "which assigns all non-full-length reads to isoforms " + \
                     "does not exist. Please check 'ice_partial.py *' are " + \
                     "all done."
        elif not nfs_exists(self.final_pickle_fn):
            errMsg = "Pickle file {f} ".format(f=self.final_pickle_fn) + \
                     "which assigns full-length non-chimeric reads to " + \
                     "isoforms does not exist."

        if self.bas_fofn is not None and \
            guess_file_format(self.bas_fofn) is not FILE_FORMATS.BAM:
            # BAM input needs no FASTA conversion; H5 input requires
            # fasta_fofn.
            if self.fasta_fofn is None:
                errMsg = "Please make sure ice_make_fasta_fofn has " + \
                         "been called, and specify fasta_fofn."
            elif not nfs_exists(self.fasta_fofn):
                errMsg = "Input fasta_fofn {f} does not exist.".\
                         format(f=self.fasta_fofn)
            else:
                fasta_files = get_files_from_file_or_fofn(self.fasta_fofn)
                for fasta_file in fasta_files:
                    if not nfs_exists(fasta_file):
                        errMsg = "A file {f} in fasta_fofn does not exist.".\
                                 format(f=fasta_file)

        if errMsg != "":
            self.add_log(errMsg, level=logging.ERROR)
            raise IOError(errMsg)
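
The try/except around mkdir is the usual race-tolerant creation pattern for jobs that may start concurrently. A slightly stricter standalone variant that swallows only the already-exists case (a sketch, not pbtranscript's mkdir helper):

import errno
import os

def mkdir_p(path):
    """Create path; tolerate a sibling job creating it first."""
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:  # re-raise real failures
            raise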
Example #4
    def index_input_subreads(self):
        """Index input subreads in self.fasta_fofn or self.bas_fofn.
        """
        if guess_file_format(self.bas_fofn) == FILE_FORMATS.BAM:
            msg = "Indexing files in %s, please wait." % self.bas_fofn
            self.add_log(msg)
            d = BamCollection(self.bas_fofn)
        else:
            msg = "Indexing files in %s, please wait." % self.fasta_fofn
            self.add_log(msg)
            d = MetaSubreadFastaReader(get_files_from_file_or_fofn(self.fasta_fofn))

        self.add_log("File indexing done.")
        return d
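
The same dispatch can be factored out of the class for testing; the sketch below reuses the pbtranscript names already used above and is otherwise hypothetical:

# Hypothetical free-function version of the dispatch in index_input_subreads.
def build_subread_index(bas_fofn, fasta_fofn):
    if guess_file_format(bas_fofn) == FILE_FORMATS.BAM:
        return BamCollection(bas_fofn)
    return MetaSubreadFastaReader(get_files_from_file_or_fofn(fasta_fofn))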
Example #5
def _sanity_check_args(args):
    """Sanity check tofu arguments."""
    # Check required arguments
    if args.nfl_fa is None:
        raise ValueError("--nfl_fa must be provided for tofu_wrap. Quit.")
    if args.bas_fofn is None:
        raise ValueError(
            "--bas_fofn must be provided for polishing isoforms. Quit.")
    if not args.quiver:  # overwrite --quiver
        logging.warning("Overwrite --quiver to True for tofu_wrap. Continue.")
        args.quiver = True

    # check gmap reference genome
    if all(arg is None
           for arg in [args.gmap_db, args.gmap_name, args.gmap_ds]):
        raise ValueError("GMAP reference Database is not set! Quit.")
    # overwrite args.gmap_db, args.gmap_name if args.gmap_ds is not None
    if args.gmap_ds is not None:
        args.gmap_db, args.gmap_name = gmap_db_and_name_from_ds(args.gmap_ds)
    # check gmap dir existence
    if not op.exists(args.gmap_db):
        raise IOError("GMAP DB location not valid: %s. Quit." % args.gmap_db)
    if not op.exists(op.join(args.gmap_db, args.gmap_name)):
        raise IOError("GMAP name not valid: %s. Quit." % args.gmap_name)

    # Check input format: BAM or subreadset.xml is required; bax.h5/bas.h5
    # must be converted to BAM first.
    if guess_file_format(args.bas_fofn) != FILE_FORMATS.BAM:
        raise ValueError(
            "--bas_fofn %s must be either BAM or subreadset.xml. " %
            args.bas_fofn +
            "Bax.h5 must be converted to BAM using bax2bam first! " +
            "Multiple BAM files can be merged to a BAM FOFN or dataset xml.")

    # check output file format
    if not any(
            args.collapsed_filtered_fn.endswith(ext)
            for ext in (".fa", ".fasta", ".fq", ".fastq")):
        raise ValueError("Output file %s must be FASTA or FASTQ!" %
                         args.collapsed_filtered_fn)

    # check blasr version
    check_blasr()
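
A hedged sketch of exercising the check from a test, with argparse.Namespace standing in for tofu's parsed arguments (field names are taken from the checks above; every path is a placeholder, so with these values the call fails fast on the GMAP DB existence check):

from argparse import Namespace

# Placeholder arguments mirroring each field _sanity_check_args reads.
args = Namespace(nfl_fa="nfl.fasta",
                 bas_fofn="movie.subreadset.xml",
                 quiver=True,
                 gmap_db="/path/to/gmap_db",
                 gmap_name="genome",
                 gmap_ds=None,
                 collapsed_filtered_fn="collapsed.fastq")
_sanity_check_args(args)  # raises ValueError or IOError on bad inputs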
Example #6
    def validate_inputs(self):
        """Validate input fofns, and root_dir, log_dir, tmp_dir,
        create arrowed_dir and arrowed_log_dir"""
        self.add_log("Validating inputs.")

        # Create directories: root_dir/arrowed and root_dir/log_dir/arrowed
        try:
            mkdir(self.arrowed_dir)
            mkdir(self.arrowed_log_dir)
        except OSError:
            # Multiple ice_arrow_i jobs may run at the same time and try to
            # mkdir, race condition may happen, so ignore OSError here.
            pass

        errMsg = ""

        if not nfs_exists(self.log_dir) or not op.isdir(self.log_dir):
            errMsg = "Log dir {l} is not an existing directory.".\
                format(l=self.log_dir)
        elif self.subread_xml is None:
            errMsg = "Please specify subreads XML (e.g., --subread_xml=<movie>.subreadset.xml)."
        elif not nfs_exists(self.subread_xml):
            errMsg = "Specified subreads file (subread_xml={f}) does not exist.".format(
                f=self.subread_xml)
        elif guess_file_format(self.subread_xml) is not FILE_FORMATS.BAM:
            errMsg = "Invalid subreads XML file: {0}!".format(self.subread_xml)
        elif not nfs_exists(self.nfl_all_pickle_fn):
            #"output/map_noFL/noFL.ALL.partial_uc.pickle"):
            errMsg = "Pickle file {f} ".format(f=self.nfl_all_pickle_fn) + \
                     "which assigns all non-full-length reads to isoforms " + \
                     "does not exist. Please check 'run_IcePartials2.py *' are " + \
                     "all done."
        elif not nfs_exists(self.final_pickle_fn):
            errMsg = "Pickle file {f} ".format(f=self.final_pickle_fn) + \
                     "which assigns full-length non-chimeric reads to " + \
                     "isoforms does not exist."

        if errMsg != "":
            self.add_log(errMsg, level=logging.ERROR)
            raise IOError(errMsg)
Example #7
    def run(self):
        """
        First, split non-full-length (nfl) fasta files into smaller
        chunks, assign nfl reads in each split fasta file to
        unpolished isoform clusters, and then merge all pickles
        into self.nfl_all_pickle_fn.
        Second, bin every 100 clusters; for each bin, call blasr,
        samtoh5, loadPulses and cmph5tools to create cmp.h5 files, then
        call quiver to polish each isoform within the bin.
        Finally, pick up good isoform clusters whose QV-based errors are
        below a threshold.
        Save all high-quality isoforms to hq_isoforms_fa|fq if not None.
        Save all low-quality isoforms to lq_isoforms_fa|fq if not None.
        """
        if guess_file_format(self.bas_fofn) != FILE_FORMATS.BAM:
            # Create input.fasta.fofn from bas_fofn
            self.add_log("Creating fasta fofn from bas/bax.h5/bam fofn",
                         level=logging.INFO)
            if self.fasta_fofn is None:
                self.fasta_fofn = op.join(self.nfl_dir, "input.fasta.fofn")
            self.add_log("fasta fofn={f}".format(f=self.fasta_fofn))
            convert_fofn_to_fasta(fofn_filename=self.bas_fofn,
                                  out_filename=self.fasta_fofn,
                                  fasta_out_dir=self.nfl_dir)
        else:
            self.fasta_fofn = None

        # Split non-full-length reads into smaller fasta files, saved under
        # self.nfl_dir with prefix "input.split".
        self.add_log("Splitting {nfl} into ".format(nfl=self.nfl_fa) +
                     "smaller files each containing {n} reads.".format(
                         n=self.ice_opts.nfl_reads_per_split),
                     level=logging.INFO)
        self._nfl_splitted_fas = splitFasta(
            input_fasta=self.nfl_fa,
            reads_per_split=self.ice_opts.nfl_reads_per_split,
            out_dir=self.nfl_dir,
            out_prefix="input.split")
        msg = "Splitted files are: " + "\n".join(self._nfl_splitted_fas)
        self.add_log(msg, level=logging.INFO)

        # Generating dazz DB for final.consensus.fasta
        ref_obj = DazzIDHandler(input_filename=self.final_consensus_fa,
                                converted=False)
        ref_obj.make_db()
        msg = "Dazz DB made for: " + ref_obj.dazz_filename
        self.add_log(msg, level=logging.INFO)

        # Process nfl reads in each splitted fasta.
        self.add_log("Initializing IceAllPartials.", level=logging.INFO)

        self.icep = IceAllPartials(root_dir=self.root_dir,
                                   fasta_filenames=self._nfl_splitted_fas,
                                   ref_fasta=self.final_consensus_fa,
                                   out_pickle=self.nfl_all_pickle_fn,
                                   sge_opts=self.sge_opts,
                                   ccs_fofn=self.ccs_fofn)
        self.add_log("IceAllPartials log: {f}.".format(f=self.icep.log_fn),
                     level=logging.INFO)
        self.icep.run()
        self.add_log("IceAllPartials completed.", level=logging.INFO)

        self.add_log("Initializing IceQuiver.", level=logging.INFO)
        self.iceq = IceQuiver(root_dir=self.root_dir,
                              bas_fofn=self.bas_fofn,
                              fasta_fofn=self.fasta_fofn,
                              sge_opts=self.sge_opts,
                              tmp_dir=self.tmp_dir)
        self.add_log("IceQuiver log: {f}.".format(f=self.iceq.log_fn),
                     level=logging.INFO)
        self.iceq.run()
        self.add_log("IceQuiver finished.", level=logging.INFO)

        self.add_log("Initializing IceQuiverPostprocess.", level=logging.INFO)
        self.icepq = IceQuiverPostprocess(root_dir=self.root_dir,
                                          use_sge=self.sge_opts.use_sge,
                                          quit_if_not_done=False,
                                          ipq_opts=self.ipq_opts)
        self.add_log(
            "IceQuiverPostprocess log: {f}.".format(f=self.icepq.log_fn),
            level=logging.INFO)
        self.icepq.run()
        self.add_log("IceQuiverPostprocess finished.", level=logging.INFO)