def get_parser():
    """Build the Coverage report parser.

    Inputs: a reference DataSet and an alignment-summary GFF.
    Output: the JSON report, plus a max-contigs plotting option.

    :rtype: PbParser
    """
    p = get_pbparser(tool_id=Constants.TOOL_ID,
                     version=__version__,
                     name="Coverage",
                     description=__doc__,
                     driver_exe=Constants.DRIVER_EXE,
                     is_distributed=True)
    # FIX: the original bound `ap = p.arg_parser.parser` and never used it;
    # the dead local has been removed.
    p.add_input_file_type(FileTypes.DS_REF, "reference",
                          name="Reference DataSet",
                          description="Reference DataSet XML or FASTA file")
    p.add_input_file_type(FileTypes.GFF, "gff",
                          name="Alignment Summary GFF",
                          description="Alignment Summary GFF")
    p.add_output_file_type(FileTypes.REPORT, "report_json",
                           name=meta_rpt.title,
                           description="Path to write report JSON output",
                           default_name=meta_rpt.id)
    p.add_int(option_id=Constants.MAX_CONTIGS_ID,
              option_str="maxContigs",
              default=Constants.MAX_CONTIGS_DEFAULT,
              name="Maximum number of contigs to plot",
              description="Maximum number of contigs to plot in coverage report")
    return p
def _get_parser():
    """Return the PbParser for the CCS mapping statistics report."""
    parser = get_pbparser(TOOL_ID, __version__, "CCS Mapping Statistics",
                          __doc__, DRIVER_EXE)
    parser.add_input_file_type(FileTypes.DS_ALIGN_CCS, "alignment_file",
                               "ConsensusAlignment XML DataSet",
                               "BAM, SAM or ConsensusAlignment DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "PacBio Json Report",
                                "Output report JSON file.",
                                "mapping_stats_report.json")
    return parser
def get_contract_parser():
    """
    Central point of programmatically defining a Parser.

    :rtype: PbParser
    :return: PbParser
    """
    # Commandline exe to call "{exe}" /path/to/resolved-tool-contract.json
    # FIX: module path corrected to 'examples' (plural) for consistency with
    # the other dev-app parsers in this codebase; 'example' would not resolve.
    driver_exe = "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract "
    desc = "Dev app for Testing that supports emitting tool contracts"
    subcomponents = [("my_subcomponent", "1.2.3")]
    # Two temp files and one temp dir are requested at runtime
    # (duplicate TMP_FILE appears intentional — two scratch files).
    resource_types = (ResourceTypes.TMP_FILE,
                      ResourceTypes.TMP_FILE,
                      ResourceTypes.TMP_DIR)
    p = get_pbparser(TOOL_ID, __version__, "Example Dev App", desc, driver_exe,
                     is_distributed=False,
                     resource_types=resource_types,
                     subcomponents=subcomponents)
    add_args_and_options(p)
    return p
def test_misc_parser_types(self):
    """Exercise int/float/boolean option plumbing through the arg parser."""
    p = get_pbparser("pbcommand.tasks.test_parsers", "0.1.0", "Tool Name",
                     "Tool Descripion", "pbcommand-driver-exe ")
    p.add_int("pbcommand.task_options.n", "n", default=0, name="N",
              description="Integer option")
    p.add_float("pbcommand.task_options.f", "f", default=0, name="F",
                description="Float option")
    # XXX note that the 'default' value is not actually what the option is
    # set to by default - it simply signals that action=store_true
    p.add_boolean("pbcommand.task_options.loud", "loud", default=False,
                  name="Verbose", description="Boolean option")
    parse = p.arg_parser.parser.parse_args
    parsed = parse(["--n", "250", "--f", "1.2345", "--loud"])
    self.assertEqual(parsed.n, 250)
    self.assertEqual(parsed.f, 1.2345)
    self.assertTrue(parsed.loud)
    defaults = parse([])
    self.assertFalse(defaults.loud)
def get_parser():
    """Build the PbParser for the Txt dev-app example."""
    exe = "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract "
    desc = "Dev app for Testing that supports emitting tool contracts"
    # Can specify libs or other dependencies that
    components = [("pbcommand", VERSION),
                  ("my_component", "0.1.0"),
                  ("my_component_id", "1.2.3")]
    # ResourceTypes.* — two temp files and a temp dir
    resources = (ResourceTypes.TMP_FILE,
                 ResourceTypes.TMP_FILE,
                 ResourceTypes.TMP_DIR)
    # Create an instance of a Pacbio Parser
    p = get_pbparser(TOOL_ID, VERSION, "Txt App", desc, exe,
                     is_distributed=False,
                     resource_types=resources,
                     subcomponents=components)
    # Input file types
    p.add_input_file_type(FileTypes.TXT, "txt_in", "Txt file",
                          "Generic Text File")
    # Output file types
    p.add_output_file_type(FileTypes.TXT, "txt_out", "Txt outfile",
                           "Generic Output Txt file", "output.txt")
    p.add_int("pbcommand.task_options.dev_max_nlines", "max_nlines", 10,
              "Max Lines", "Max Number of lines to Copy")
    return p
def _get_parser():
    """Return the polished-assembly report parser with its options added."""
    exe = ("python -m "
           "pbreports.report.polished_assembly "
           "--resolved-tool-contract ")
    p = get_pbparser(Constants.TOOL_ID, __version__, spec.title, __doc__, exe)
    return _add_options_to_parser(p)
def test_misc_parser_types(self):
    """Check that int, float and boolean options round-trip via argparse."""
    p = get_pbparser("pbcommand.tasks.test_parsers", "0.1.0", "Tool Name",
                     "Tool Descripion", "pbcommand-driver-exe ")
    p.add_int("pbcommand.task_options.n", "n", default=0, name="N",
              description="Integer option")
    p.add_float("pbcommand.task_options.f", "f", default=0, name="F",
                description="Float option")
    # XXX note that the 'default' value is not actually what the option is
    # set to by default - it simply signals that action=store_true
    p.add_boolean("pbcommand.task_options.loud", "loud", default=False,
                  name="Verbose", description="Boolean option")
    run_parse = p.arg_parser.parser.parse_args
    with_flags = run_parse(["--n", "250", "--f", "1.2345", "--loud"])
    self.assertEqual(with_flags.n, 250)
    self.assertEqual(with_flags.f, 1.2345)
    self.assertTrue(with_flags.loud)
    no_flags = run_parse([])
    self.assertFalse(no_flags.loud)
def get_parser():
    """Build the barcode report parser (subreads + barcodes -> JSON report)."""
    p = get_pbparser(tool_id=Constants.TOOL_ID,
                     version=__version__,
                     name=Constants.TOOL_NAME,
                     description=__doc__,
                     driver_exe=Constants.DRIVER_EXE)
    p.add_input_file_type(FileTypes.DS_SUBREADS, "subreads",
                          name="BarcodedSubreadSet",
                          description="Barcoded Subread DataSet XML")
    p.add_input_file_type(FileTypes.DS_BARCODE, "barcodes",
                          name="BarcodeSet",
                          description="Barcode DataSet XML")
    p.add_output_file_type(FileTypes.REPORT, "report_json",
                           name="Barcode Report",
                           description="Summary of barcoding results",
                           default_name="barcode_report")
    # TODO(nechols)(2016-03-15) not yet supported in SA 3.x
    # this is necessary for BasH5Reader to handle the differences between the
    # .ccs.h5 files and .bas.h5 files.
    raw_parser = p.arg_parser.parser
    raw_parser.add_argument('--ccs', action='store_true',
                            help='Use consensus reads instead of subreads.')
    return p
def get_contract_parser():
    """
    Central point of programmatically defining a Parser.

    :rtype: PbParser
    :return: PbParser
    """
    # Commandline exe to call "{exe}" /path/to/resolved-tool-contract.json
    # FIX: module path corrected to 'examples' (plural), matching the other
    # dev-app parsers in this codebase; 'example' would not resolve.
    driver_exe = "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract "
    desc = "Dev app for Testing that supports emitting tool contracts"
    subcomponents = [("my_subcomponent", "1.2.3")]
    # Two temp files and one temp dir requested at runtime.
    resource_types = (ResourceTypes.TMP_FILE,
                      ResourceTypes.TMP_FILE,
                      ResourceTypes.TMP_DIR)
    p = get_pbparser(TOOL_ID, __version__, "Example Dev App", desc, driver_exe,
                     is_distributed=False,
                     resource_types=resource_types,
                     subcomponents=subcomponents)
    add_args_and_options(p)
    return p
def get_parser():
    """Build the barcode report parser for this tool's driver."""
    p = get_pbparser(tool_id=Constants.TOOL_ID,
                     version=__version__,
                     name=Constants.TOOL_NAME,
                     description=__doc__,
                     driver_exe=Constants.DRIVER_EXE)
    p.add_input_file_type(FileTypes.DS_SUBREADS, "subreads",
                          name="BarcodedSubreadSet",
                          description="Barcoded Subread DataSet XML")
    p.add_input_file_type(FileTypes.DS_BARCODE, "barcodes",
                          name="BarcodeSet",
                          description="Barcode DataSet XML")
    p.add_output_file_type(FileTypes.REPORT, "report_json",
                           name="Barcode report",
                           description="Path to write Report json output.",
                           default_name="barcode_report")
    # TODO(nechols)(2016-03-15) not yet supported in SA 3.x
    # this is necessary for BasH5Reader to handle the differences between the
    # .ccs.h5 files and .bas.h5 files.
    argparse_layer = p.arg_parser.parser
    argparse_layer.add_argument("--ccs", action="store_true",
                                help="Use consensus reads instead of subreads.")
    return p
def get_contract_parser():
    """Build the Top Variants report parser (GFF + reference inputs)."""
    p = get_pbparser(Constants.TOOL_ID, __version__, spec.title, __doc__,
                     Constants.DRIVER_EXE, is_distributed=True)
    add_base_options_pbcommand(p, "Top Variants Report")
    p.add_input_file_type(FileTypes.GFF,
                          file_id="gff",
                          name="GFF file",
                          description="variants.gff (can be gzip'ed)")
    p.add_input_file_type(FileTypes.DS_REF,
                          file_id="reference",
                          name="Reference dataset",
                          description="ReferenceSet or FASTA")
    # p.add_output_file_type(FileTypes.REPORT, "report",
    #                        "JSON report", "JSON report", "report.json")
    p.add_int(Constants.HOW_MANY_ID, "how_many",
              default=Constants.HOW_MANY_DEFAULT,
              name="Number of variants",
              description="number of top variants to show (default=100)")
    p.add_int(Constants.BATCH_SORT_SIZE_ID, "batch_sort_size",
              default=Constants.BATCH_SORT_SIZE_DEFAULT,
              name="Batch sort size",
              description="Intermediate sort size parameter (default=10000)")
    # XXX do we need a flag for minor variants?
    return p
def get_parser():
    """Build the barcode report parser taking a datastore plus raw inputs.

    The CSV details table is registered on the tool-contract layer only
    (not exposed to the argparse layer).
    """
    p = get_pbparser(tool_id=Constants.TOOL_ID,
                     version=__version__,
                     name=Constants.TOOL_NAME,
                     description=__doc__,
                     driver_exe=Constants.DRIVER_EXE)
    p.add_input_file_type(
        FileTypes.DATASTORE, "ds_bc",
        name="JSON Datastore or SubreadSet or ConsensusReadSet",
        description="Datastore of barcoded SubreadSet/ConsensusReadSet files")
    p.add_input_file_type(FileTypes.DS_SUBREADS, "subreads_in",
                          name="Input SubreadSet",
                          description="Input SubreadSet (without barcodes)")
    p.add_input_file_type(FileTypes.DS_BARCODE, "barcodes",
                          name="BarcodeSet",
                          description="Barcode DataSet XML")
    p.add_output_file_type(FileTypes.REPORT, "report_json",
                           name="Barcode Report",
                           description="Summary of barcoding results",
                           default_name="barcode_report")
    # Tool-contract-only output: the per-barcode details table.
    p.tool_contract_parser.add_output_file_type(
        FileTypes.CSV, "report_csv",
        name="Barcode Report Details",
        description="Barcode Details Table as CSV",
        default_name="barcodes_report")
    p.add_boolean(Constants.ISOSEQ_MODE, "isoseq_mode", False,
                  "Iso-Seq mode", "Iso-Seq mode")
    return p
def get_parser():
    """
    Construct a hybrid PbParser with most tool contract parameters defined
    separately from argparser parameters.
    """
    p = get_pbparser(
        tool_id=Constants.TOOL_ID,
        version=__VERSION__,
        name="variantCaller",
        description="Compute genomic consensus and call variants relative to the reference.",
        driver_exe=Constants.DRIVER_EXE,
        nproc=SymbolTypes.MAX_NPROC,
        resource_types=(),
        default_level="WARN")
    # Tool-contract-only inputs/outputs/options; the argparse side is
    # populated separately below.
    tcp = p.tool_contract_parser
    tcp.add_input_file_type(FileTypes.DS_ALIGN, "infile",
                            "Alignment DataSet", "BAM or Alignment DataSet")
    tcp.add_input_file_type(FileTypes.DS_REF, "reference",
                            "Reference DataSet", "Fasta or Reference DataSet")
    tcp.add_output_file_type(FileTypes.GFF, "variants",
                             name="Consensus GFF",
                             description="Consensus GFF",
                             default_name="variants")
    tcp.add_output_file_type(FileTypes.DS_CONTIG, "consensus",
                             name="Consensus ContigSet",
                             description="Consensus sequence in Fasta format",
                             default_name="consensus")
    tcp.add_output_file_type(FileTypes.FASTQ, "consensus_fastq",
                             name="Consensus fastq",
                             description="Consensus fastq",
                             default_name="consensus")
    tcp.add_str(option_id=Constants.ALGORITHM_ID,
                option_str="algorithm",
                default=Constants.DEFAULT_ALGORITHM,
                name="Algorithm",
                description="Variant calling algorithm")
    tcp.add_int(option_id=Constants.MIN_CONFIDENCE_ID,
                option_str="minConfidence",
                default=Constants.DEFAULT_MIN_CONFIDENCE,
                name="Minimum confidence",
                description=("The minimum confidence for a variant call to "
                             "be output to variants.gff"))
    tcp.add_int(option_id=Constants.MIN_COVERAGE_ID,
                option_str="minCoverage",
                default=Constants.DEFAULT_MIN_COVERAGE,
                name="Minimum coverage",
                description=("The minimum site coverage that must be "
                             "achieved for variant calls and consensus to "
                             "be calculated for a site."))
    tcp.add_boolean(option_id=Constants.DIPLOID_MODE_ID,
                    option_str="diploid",
                    default=False,
                    name="Diploid mode (experimental)",
                    description=("Enable detection of heterozygous variants "
                                 "(experimental)"))
    # Mirror the options onto the plain argparse layer.
    add_options_to_argument_parser(p.arg_parser.parser)
    return p
def get_parser():
    """Build the AlignmentSet-consolidate parser."""
    p = get_pbparser(Constants.TOOL_ID, Constants.VERSION,
                     "AlignmentSet consolidate", __doc__, Constants.DRIVER,
                     is_distributed=True,
                     resource_types=(ResourceTypes.TMP_DIR,))
    p.add_input_file_type(FileTypes.DS_ALIGN, "align_in",
                          "Input AlignmentSet",
                          "Gathered AlignmentSet to consolidate")
    p.add_output_file_type(FileTypes.DS_ALIGN, "ds_out", "Alignments",
                           description="Alignment results dataset",
                           default_name="combined")
    p.add_boolean(Constants.CONSOLIDATE_ID, "consolidate",
                  default=False,
                  name="Consolidate .bam",
                  description="Merge chunked/gathered .bam files")
    p.add_int(Constants.N_FILES_ID, "consolidate_n_files",
              default=1,
              name="Number of .bam files",
              description="Number of .bam files to create in consolidate mode")
    return p
def get_parser():
    """Build the Coverage report parser.

    Inputs: a reference DataSet and an alignment-summary GFF.
    Output: the coverage JSON report, plus a max-contigs plotting option.

    :rtype: PbParser
    """
    p = get_pbparser(tool_id=Constants.TOOL_ID,
                     version=__version__,
                     name="Coverage",
                     description=__doc__,
                     driver_exe=Constants.DRIVER_EXE,
                     is_distributed=True)
    # FIX: the original assigned `ap = p.arg_parser.parser` without ever
    # using it; the dead local has been removed.
    p.add_input_file_type(FileTypes.DS_REF, "reference",
                          name="Reference DataSet",
                          description="Reference DataSet XML or FASTA file")
    p.add_input_file_type(FileTypes.GFF, "gff",
                          name="Alignment Summary GFF",
                          description="Alignment Summary GFF")
    p.add_output_file_type(FileTypes.REPORT, "report_json",
                           name="JSON report",
                           description="Path to write report JSON output",
                           default_name="coverage_report")
    p.add_int(option_id=Constants.MAX_CONTIGS_ID,
              option_str="maxContigs",
              default=Constants.MAX_CONTIGS_DEFAULT,
              name="Maximum number of contigs to plot",
              description="Maximum number of contigs to plot in coverage report")
    return p
def get_parser():
    """Return the PbParser for the Txt dev-app example."""
    exe = "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract "
    desc = "Dev app for Testing that supports emitting tool contracts"
    # Can specify libs or other dependencies that
    deps = [("pbcommand", VERSION),
            ("my_component", "0.1.0"),
            ("my_component_id", "1.2.3")]
    # ResourceTypes.* — two temp files plus a temp dir
    resources = (ResourceTypes.TMP_FILE,
                 ResourceTypes.TMP_FILE,
                 ResourceTypes.TMP_DIR)
    # Create an instance of a Pacbio Parser
    p = get_pbparser(TOOL_ID, VERSION, "Txt App", desc, exe,
                     is_distributed=False,
                     resource_types=resources,
                     subcomponents=deps)
    # Input file types
    p.add_input_file_type(FileTypes.TXT, "txt_in", "Txt file",
                          "Generic Text File")
    # Output file types
    p.add_output_file_type(FileTypes.TXT, "txt_out", "Txt outfile",
                           "Generic Output Txt file", "output")
    p.add_int("pbcommand.task_options.dev_max_nlines", "max_nlines", 10,
              "Max Lines", "Max Number of lines to Copy")
    return p
def get_contract_parser():
    """Build the Top Variants report parser (title from report metadata)."""
    p = get_pbparser(Constants.TOOL_ID, __version__, meta_rpt.title, __doc__,
                     Constants.DRIVER_EXE, is_distributed=True)
    add_base_options_pbcommand(p, "Top Variants Report")
    p.add_input_file_type(FileTypes.GFF,
                          file_id="gff",
                          name="GFF file",
                          description="variants.gff (can be gzip'ed)")
    p.add_input_file_type(FileTypes.DS_REF,
                          file_id="reference",
                          name="Reference dataset",
                          description="ReferenceSet or FASTA")
    # p.add_output_file_type(FileTypes.REPORT, "report",
    #                        "JSON report", "JSON report", "report.json")
    p.add_int(Constants.HOW_MANY_ID, "how_many",
              default=Constants.HOW_MANY_DEFAULT,
              name="Number of variants",
              description="number of top variants to show (default=100)")
    p.add_int(Constants.BATCH_SORT_SIZE_ID, "batch_sort_size",
              default=Constants.BATCH_SORT_SIZE_DEFAULT,
              name="Batch sort size",
              description="Intermediate sort size parameter (default=10000)")
    # XXX do we need a flag for minor variants?
    return p
def get_contract_parser():
    """Build the Iso-Seq cluster report parser.

    NOTE(review): the FASTQ-described inputs and the JSON summary are
    registered with FileTypes.DS_CONTIG / FileTypes.JSON respectively —
    the descriptions and file types look mismatched; confirm upstream.
    """
    p = get_pbparser(Constants.TOOL_ID, __version__,
                     "Iso-Seq Cluster Report", __doc__,
                     Constants.DRIVER_EXE, is_distributed=True)
    p.add_input_file_type(FileTypes.DS_CONTIG, "inReadsFN", "Fasta reads",
                          description=("Reads in FASTA format, usually are "
                                       "consensus, isoforms produced by "
                                       "Iso-Seq Cluster."))
    p.add_input_file_type(FileTypes.DS_CONTIG, "hq_isoforms_fq",
                          "HQ isoforms in Fastq",
                          description=("HQ isoforms in FASTQ format produced "
                                       "by Iso-Seq Cluster."))
    p.add_input_file_type(FileTypes.DS_CONTIG, "lq_isoforms_fq",
                          "LQ isoforms in Fastq",
                          description=("LQ isoforms in FASTQ format produced "
                                       "by Iso-Seq Cluster."))
    p.add_input_file_type(FileTypes.JSON, "inSummaryFN", "Summary text",
                          description=("A summary produced by Iso-Seq "
                                       "Cluster, e.g. cluster_summary.txt"))
    p.add_output_file_type(FileTypes.REPORT, "outJson",
                           "Transcript Clustering Report",
                           description="Summary of results from pbtranscript",
                           default_name="isoseq_cluster_report")
    return p
def _get_parser():
    """Build the Iso-Seq 3 report parser (HQ/LQ transcript inputs)."""
    p = get_pbparser(Constants.TOOL_ID, __version__, "Iso-Seq Report",
                     __doc__, Constants.DRIVER_EXE, is_distributed=True)
    p.add_input_file_type(
        FileTypes.DS_TRANSCRIPT, "hq_transcripts",
        "Clustered high-quality transcripts",
        description="Clustered transcripts from 'sierra' in BAM dataset format")
    p.add_input_file_type(
        FileTypes.DS_TRANSCRIPT, "lq_transcripts",
        "Clustered low-quality transcripts",
        description="Clustered transcripts from 'sierra' in BAM dataset format")
    p.add_output_file_type(
        FileTypes.REPORT, "outJson",
        "Transcript Clustering Report",
        description="Summary of results from pbtranscript",
        default_name="isoseq3_report")
    return p
def get_parser():
    """
    Construct a hybrid PbParser with most tool contract parameters defined
    separately from argparser parameters.
    """
    p = get_pbparser(
        tool_id=Constants.TOOL_ID,
        version=__VERSION__,
        name="variantCaller",
        description="Compute genomic consensus and call variants relative to the reference.",
        driver_exe=Constants.DRIVER_EXE,
        nproc=SymbolTypes.MAX_NPROC,
        resource_types=(),
        default_level="WARN")
    # Tool-contract layer: inputs, outputs, and options.
    tcp = p.tool_contract_parser
    tcp.add_input_file_type(FileTypes.DS_ALIGN, "infile",
                            "Alignment DataSet", "BAM or Alignment DataSet")
    tcp.add_input_file_type(FileTypes.DS_REF, "reference",
                            "Reference DataSet", "Fasta or Reference DataSet")
    tcp.add_output_file_type(FileTypes.GFF, "variants",
                             name="Variant Calls",
                             description="List of variants from the reference",
                             default_name="variants")
    tcp.add_output_file_type(FileTypes.DS_CONTIG, "consensus",
                             name="Consensus Contigs",
                             description="Consensus contigs dataset",
                             default_name="consensus")
    tcp.add_output_file_type(FileTypes.FASTQ, "consensus_fastq",
                             name="Consensus Contigs",
                             description="Consensus contigs in FASTQ format",
                             default_name="consensus")
    tcp.add_str(option_id=Constants.ALGORITHM_ID,
                option_str="algorithm",
                default=Constants.DEFAULT_ALGORITHM,
                name="Algorithm",
                description="Variant calling algorithm")
    tcp.add_int(option_id=Constants.MIN_CONFIDENCE_ID,
                option_str="minConfidence",
                default=Constants.DEFAULT_MIN_CONFIDENCE,
                name="Minimum confidence",
                description=("The minimum confidence for a variant call to "
                             "be output to variants.gff"))
    tcp.add_int(option_id=Constants.MIN_COVERAGE_ID,
                option_str="minCoverage",
                default=Constants.DEFAULT_MIN_COVERAGE,
                name="Minimum coverage",
                description=("The minimum site coverage that must be "
                             "achieved for variant calls and consensus to "
                             "be calculated for a site."))
    tcp.add_boolean(option_id=Constants.DIPLOID_MODE_ID,
                    option_str="diploid",
                    default=False,
                    name="Diploid mode (experimental)",
                    description=("Enable detection of heterozygous variants "
                                 "(experimental)"))
    # Argparse layer: mirror the same options for direct CLI use.
    add_options_to_argument_parser(p.arg_parser.parser)
    return p
def _get_parser():
    """Return the summarize-coverage parser with its options attached."""
    exe = ("python -m "
           "pbreports.report.summarize_coverage.summarize_coverage "
           "--resolved-tool-contract ")
    p = get_pbparser(Constants.TOOL_ID, __version__, "Summarize Coverage",
                     __doc__, exe)
    return add_options_to_parser(p)
def _get_parser_core():
    """Return the bare amplicon-analysis-timing parser (no options added)."""
    exe = ("python -m "
           "pbreports.report.amplicon_analysis_timing "
           "--resolved-tool-contract ")
    return get_pbparser(Constants.TOOL_ID, __version__, spec.title,
                        __doc__, exe)
def _get_parser():
    """Return the Minor Variants report parser with options attached."""
    core = get_pbparser(Constants.TOOL_ID, __version__,
                        "Minor Variants Report", __doc__,
                        Constants.DRIVER_EXE, is_distributed=True)
    return _add_options_to_parser(core)
def _get_parser_core():
    """Return the bare parser titled from the report metadata."""
    return get_pbparser(Constants.TOOL_ID, __version__, meta_rpt.title,
                        __doc__, Constants.DRIVER_EXE, is_distributed=True)
def _get_parser():
    """Return the report parser (title from spec) with options attached."""
    core = get_pbparser(Constants.TOOL_ID, __version__, spec.title, __doc__,
                        Constants.DRIVER_EXE, is_distributed=True)
    return _add_options_to_parser(core)
def get_parser():
    """Return the bare Summarize Coverage (CCS) parser."""
    return get_pbparser(Constants.TOOL_ID, __version__,
                        "Summarize Coverage (CCS)", __doc__,
                        Constants.DRIVER_EXE)
def _get_parser_core():
    """Return the bare Loading XML report parser."""
    return get_pbparser(Constants.TOOL_ID, __version__, "Loading XML Report",
                        __doc__, Constants.DRIVER_EXE, is_distributed=True)
def _get_parser_core():
    """Return the bare Raw Dataset Statistics XML report parser."""
    return get_pbparser(Constants.TOOL_ID, __version__,
                        "Raw Dataset Statistics XML Report", __doc__,
                        Constants.DRIVER_EXE, is_distributed=True)
def _get_parser():
    """Return the Amplicon Analysis Input parser with options attached."""
    core = get_pbparser(Constants.TOOL_ID, __version__,
                        "Amplicon Analysis Input", __doc__,
                        Constants.DRIVER_EXE)
    return _add_options_to_parser(core)
def _get_parser_core():
    """Return the bare Amplicon Analysis Input parser."""
    return get_pbparser(Constants.TOOL_ID, __version__,
                        "Amplicon Analysis Input", __doc__,
                        Constants.DRIVER_EXE)
def _get_parser_core():
    """Return the bare Loading XML report parser (distributed task)."""
    return get_pbparser(Constants.TOOL_ID, __version__, "Loading XML Report",
                        __doc__, Constants.DRIVER_EXE, is_distributed=True)
def _get_parser_core():
    """Return the bare parser, titled from the report spec."""
    return get_pbparser(Constants.TOOL_ID, __version__, spec.title, __doc__,
                        Constants.DRIVER_EXE, is_distributed=True)
def _get_parser_core():
    """Return the bare Raw Dataset Statistics XML report parser (distributed)."""
    return get_pbparser(Constants.TOOL_ID, __version__,
                        "Raw Dataset Statistics XML Report", __doc__,
                        Constants.DRIVER_EXE, is_distributed=True)
def _get_parser():
    """Return the mapping statistics report parser."""
    # NOTE(review): "a Aligned" in the description is a typo in a runtime
    # string; kept verbatim to preserve behavior.
    summary = "Create a Mapping Report from a Aligned BAM or Alignment DataSet"
    exe = "python -m pbreports.report.mapping_stats --resolved-tool-contract "
    parser = get_pbparser(TOOL_ID, __version__, "Mapping Statistics",
                          summary, exe)
    parser.add_input_file_type(FileTypes.DS_ALIGN, "alignment_file",
                               "Alignment XML DataSet",
                               "BAM, SAM or Alignment DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "PacBio Json Report",
                                "Output report JSON file.",
                                "mapping_stats_report.json")
    return parser
def test_catch_output_file_extension(self):
    """A default_name containing an extension must fail contract serialization."""
    p = get_pbparser("pbcommand.tasks.test_parsers", "0.1.0", "Tool Name",
                     "Tool Descripion", "pbcommand-driver-exe ")
    p.add_output_file_type(file_type=FileTypes.GFF,
                           file_id="gff",
                           name="GFF file",
                           description="GFF file description",
                           default_name="annotations.gff")
    contract = p.to_contract()
    self.assertRaises(ValueError, contract.to_dict)
def get_base_contract_parser(Constants=BaseConstants, default_level="WARN"):
    """Return a baseline PbParser configured from a Constants namespace.

    :param Constants: namespace providing TOOL_ID, PARSER_DESC, DRIVER_EXE
    :param default_level: default logging level for the parser
    """
    return get_pbparser(tool_id=Constants.TOOL_ID,
                        version=get_version(),
                        name=Constants.TOOL_ID,
                        description=Constants.PARSER_DESC,
                        driver_exe=Constants.DRIVER_EXE,
                        nproc=SymbolTypes.MAX_NPROC,
                        resource_types=(ResourceTypes.TMP_DIR,),
                        default_level=default_level)
def _get_parser_core():
    """Return the bare SAT report parser."""
    exe = "python -m pbreports.report.sat --resolved-tool-contract "
    return get_pbparser(TOOL_ID, __version__, "SAT Report", __doc__, exe,
                        is_distributed=True)
def test_input_output_files(self):
    """Verify input/output file types serialize into the tool contract dict."""
    p = get_pbparser("pbcommand.tasks.test_parsers", "0.1", "Display Name",
                     "Tool Description ", "pbcommand-driver-cmd",
                     is_distributed=False, nproc=1, resource_types=())
    p.add_input_file_type(file_type=FileTypes.FASTA,
                          file_id="fasta",
                          name="Fasta file",
                          description="Fasta file description")
    p.add_input_file_type(FileTypes.JSON, "json", "JSON file",
                          "JSON file description")
    p.add_output_file_type(file_type=FileTypes.GFF,
                           file_id="gff",
                           name="GFF file",
                           description="GFF file description",
                           default_name="annotations")
    d = p.to_contract().to_dict()
    expected_inputs = [
        {'description': 'Fasta file description',
         'title': 'Fasta file',
         'id': 'fasta',
         'file_type_id': 'PacBio.FileTypes.Fasta'},
        {'description': 'JSON file description',
         'title': 'JSON file',
         'id': 'json',
         'file_type_id': 'PacBio.FileTypes.json'},
    ]
    self.assertEqual(d['tool_contract']['input_types'], expected_inputs)
    expected_outputs = [
        {'title': 'GFF file',
         'description': 'GFF file description',
         'default_name': 'annotations',
         'id': 'gff',
         'file_type_id': 'PacBio.FileTypes.gff'},
    ]
    self.assertEqual(d['tool_contract']['output_types'], expected_outputs)
def _get_parser_core():
    """Return the bare Preassembly report parser."""
    exe = ("python -m "
           "pbreports.report.preassembly "
           "--resolved-tool-contract ")
    return get_pbparser(Constants.TOOL_ID, __version__, "Preassembly",
                        __doc__, exe)
def _get_parser_core():
    """Return the bare Amplicon Analysis Timing report parser."""
    exe = ("python -m "
           "pbreports.report.amplicon_analysis_timing "
           "--resolved-tool-contract ")
    return get_pbparser(Constants.TOOL_ID, __version__,
                        "Amplicon Analysis Timing", __doc__, exe)
def _get_parser_core():
    """Return the bare polished-assembly report parser."""
    exe = ("python -m "
           "pbreports.report.polished_assembly "
           "--resolved-tool-contract ")
    return get_pbparser(Constants.TOOL_ID, __version__, meta_rpt.title,
                        __doc__, exe)
def _get_parser_core():
    """Return the bare Summarize Coverage report parser."""
    exe = ("python -m "
           "pbreports.report.summarize_coverage.summarize_coverage "
           "--resolved-tool-contract ")
    return get_pbparser(Constants.TOOL_ID, __version__, "Summarize Coverage",
                        __doc__, exe)
def _get_parser_core():
    """Return the bare amplicon-analysis-consensus report parser."""
    exe = ("python -m "
           "pbreports.report.amplicon_analysis_consensus "
           "--resolved-tool-contract ")
    return get_pbparser(Constants.TOOL_ID, __version__, spec.title,
                        __doc__, exe)
def get_base_contract_parser(Constants=BaseConstants, default_level="WARN"):
    """Return a baseline PbParser driven by the given Constants namespace.

    :param Constants: namespace providing TOOL_ID, PARSER_DESC, DRIVER_EXE
    :param default_level: default logging level
    """
    return get_pbparser(tool_id=Constants.TOOL_ID,
                        version=get_version(),
                        name=Constants.TOOL_ID,
                        description=Constants.PARSER_DESC,
                        driver_exe=Constants.DRIVER_EXE,
                        nproc=SymbolTypes.MAX_NPROC,
                        resource_types=(ResourceTypes.TMP_DIR,),
                        default_level=default_level)
def _get_parser():
    """Return the CCS mapping statistics parser (title from ccs_spec)."""
    parser = get_pbparser(Constants.TOOL_ID, __version__, ccs_spec.title,
                          __doc__, Constants.DRIVER_EXE)
    parser.add_input_file_type(FileTypes.DS_ALIGN_CCS, "alignment_file",
                               "ConsensusAlignment XML DataSet",
                               "BAM, SAM or ConsensusAlignment DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "Mapping Statistics Report",
                                "Summary of alignment results",
                                default_name=Constants.R_ID)
    return parser
def _get_parser():
    """Return the CCS mapping statistics parser (title from spec)."""
    parser = get_pbparser(Constants.TOOL_ID, __version__, spec.title,
                          __doc__, Constants.DRIVER_EXE)
    parser.add_input_file_type(FileTypes.DS_ALIGN_CCS, "alignment_file",
                               "ConsensusAlignment XML DataSet",
                               "BAM, SAM or ConsensusAlignment DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "Mapping Statistics Report",
                                "Summary of alignment results",
                                default_name=Constants.R_ID)
    return parser
def _get_parser():
    """Return the CCS mapping statistics parser (title from report metadata)."""
    parser = get_pbparser(Constants.TOOL_ID, __version__, meta_rpt.title,
                          __doc__, Constants.DRIVER_EXE)
    parser.add_input_file_type(FileTypes.DS_ALIGN_CCS, "alignment_file",
                               "ConsensusAlignment XML DataSet",
                               "BAM, SAM or ConsensusAlignment DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "CCS Mapping Statistics Report",
                                "Output report JSON file.",
                                default_name=meta_rpt.id)
    return parser
def get_parser():
    """Return the Modifications report parser.

    The report output is added via add_base_options_pbcommand rather than
    an explicit add_output_file_type call.
    """
    p = get_pbparser(Constants.TOOL_ID, __version__, "Modifications Report",
                     __doc__, Constants.DRIVER_EXE, is_distributed=True)
    p.add_input_file_type(FileTypes.H5, "basemods_h5", "HDF5 file",
                          "HDF5 file of base modifications from ipdSummary")
    add_base_options_pbcommand(p, "Basemods report")
    return p
def _get_parser():
    """Return the mapping statistics parser (report id as default name)."""
    summary = "Create a Mapping Report from a Aligned BAM or Alignment DataSet"
    exe = "python -m pbreports.report.mapping_stats --resolved-tool-contract "
    parser = get_pbparser(Constants.TOOL_ID, __version__,
                          "Mapping Statistics", summary, exe, nproc=1)
    parser.add_input_file_type(FileTypes.DS_ALIGN, "alignment_file",
                               "Alignment XML DataSet",
                               "BAM, SAM or Alignment DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "Mapping Statistics Report",
                                "Output report JSON file.",
                                meta_rpt.id)
    return parser
def _get_parser():
    """Return the mapping statistics parser (Constants.R_ID as default name)."""
    summary = "Create a Mapping Report from a Aligned BAM or Alignment DataSet"
    exe = "python -m pbreports.report.mapping_stats --resolved-tool-contract "
    parser = get_pbparser(Constants.TOOL_ID, __version__,
                          "Mapping Statistics", summary, exe, nproc=1)
    parser.add_input_file_type(FileTypes.DS_ALIGN, "alignment_file",
                               "Alignment XML DataSet",
                               "BAM, SAM or Alignment DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "Mapping Statistics Report",
                                "Summary of alignment results",
                                Constants.R_ID)
    return parser
def get_parser():
    """Return the Motifs report parser (motifs GFF + summary CSV inputs)."""
    p = get_pbparser(Constants.TOOL_ID, __version__, meta_rpt.title, __doc__,
                     Constants.DRIVER_EXE)
    p.add_input_file_type(FileTypes.GFF, 'gff_file',
                          "GFF file", "Path to motifs.gff.gz")
    p.add_input_file_type(FileTypes.CSV, 'motif_summary_csv',
                          "CSV file", 'Path to Motif summary CSV')
    p.add_output_file_type(FileTypes.REPORT, 'report_json',
                           name="Motifs report",
                           description="Path of output JSON report",
                           default_name="motifs_report")
    return p
def get_subreads_report_parser(tool_id, version, title, desc, driver_exe):
    """Build a generic SubreadSet-report parser.

    :param tool_id: pbcommand tool identifier
    :param version: tool version string
    :param title: display title (also used as the output's name)
    :param desc: tool description
    :param driver_exe: driver command line
    """
    p = get_pbparser(tool_id, version, title, desc, driver_exe,
                     is_distributed=True)
    p.add_input_file_type(FileTypes.DS_SUBREADS,
                          file_id="subread_set",
                          name="SubreadSet",
                          description="SubreadSet")
    p.add_output_file_type(FileTypes.REPORT, "report", title,
                           description=("Filename of JSON output report. "
                                        "Should be name only, and will be "
                                        "written to output dir"),
                           default_name="report")
    return p
def get_parser():
    """Return the Modifications report parser with an explicit report output."""
    p = get_pbparser(Constants.TOOL_ID, __version__, "Modifications Report",
                     __doc__, Constants.DRIVER_EXE, is_distributed=True)
    p.add_input_file_type(FileTypes.H5, "basemods_h5", "HDF5 file",
                          "HDF5 file of base modifications from ipdSummary")
    p.add_output_file_type(FileTypes.REPORT, "report", "Basemods report",
                           description="Summary of basemod results",
                           default_name="report")
    return p
def _get_contract_parser():
    """
    Central point of programmatically defining a Parser.

    :rtype: PbParser
    :return: PbParser
    """
    # Number of processors to use
    nproc = 2
    # Commandline exe to call "{exe}" /path/to/resolved-tool-contract.json
    # FIX: module path corrected to 'examples' (plural) for consistency with
    # the other dev-app parsers in this codebase; 'example' would not resolve.
    driver_exe = "python -m pbcommand.cli.examples.dev_app --resolved-tool-contract "
    desc = "Dev app for Testing that supports emitting tool contracts"
    p = get_pbparser(TOOL_ID, __version__, "DevApp", desc, driver_exe,
                     is_distributed=False, nproc=nproc)
    return p
def test_catch_output_file_extension(self):
    """Contract serialization rejects a default_name carrying an extension."""
    parser = get_pbparser("pbcommand.tasks.test_parsers", "0.1.0",
                          "Tool Name", "Tool Descripion",
                          "pbcommand-driver-exe ")
    parser.add_output_file_type(file_type=FileTypes.GFF,
                                file_id="gff",
                                name="GFF file",
                                description="GFF file description",
                                default_name="annotations.gff")
    built = parser.to_contract()
    self.assertRaises(ValueError, built.to_dict)
def _get_parser():
    """Return the mapping statistics parser taking aligned + unmapped inputs."""
    parser = get_pbparser(Constants.TOOL_ID, __version__, spec.title,
                          __doc__, Constants.DRIVER_EXE)
    parser.add_input_file_type(FileTypes.DS_ALIGN, "alignment_file",
                               "Alignment XML DataSet",
                               "BAM, SAM or Alignment DataSet")
    parser.add_input_file_type(FileTypes.DS_SUBREADS, "subreads_file",
                               "Subreads XML DataSet",
                               "Unmapped BAM or Subreads DataSet")
    parser.add_output_file_type(FileTypes.REPORT, "report_json",
                                "Mapping Statistics Report",
                                "Summary of alignment results",
                                default_name=Constants.R_ID)
    return parser