def main():
    """CLI entry point: look up bait/target intervals for a BAM via sra_hs_lookup."""
    parser = argparse.ArgumentParser("lookup bait and target intervals")
    # Logging flags.
    parser.add_argument(
        "-d", "--debug",
        action="store_const",
        const=logging.DEBUG,
        dest="level",
        help="Enable debug logging.",
    )
    parser.set_defaults(level=logging.INFO)
    parser.add_argument("--uuid", required=True, help="uuid string")
    parser.add_argument("--bam_path", required=True)
    parser.add_argument("--bam_library_key_json_path", required=False)
    parser.add_argument("--key_interval_json_path", required=False)

    # setup required parameters
    args = parser.parse_args()

    tool_name = "sra_hs_lookup"
    logger = pipe_util.setup_logging(tool_name, args, args.uuid)
    engine = pipe_util.setup_db(args.uuid)
    sra_hs_lookup.sra_hs_lookup(
        args.bam_path,
        args.bam_library_key_json_path,
        args.key_interval_json_path,
        engine,
        logger,
    )
    return
def main():
    """CLI entry point for the CRAMtools bam-style-index wrapper.

    Only the 'index' tool_name is recognized; anything else exits with an
    error message.
    """
    parser = argparse.ArgumentParser('CRAMtools bam-style-index')
    # Logging flags.
    parser.add_argument('-d', '--debug',
                        action='store_const', const=logging.DEBUG,
                        dest='level', help='Enable debug logging.')
    parser.set_defaults(level=logging.INFO)
    # Required flags.
    parser.add_argument('-r', '--reference_fasta_path', required=False,
                        help='Reference fasta path.')
    parser.add_argument('-b', '--cram_path', required=False, action="append",
                        help='Source cram path.')
    parser.add_argument('-u', '--uuid', required=True,
                        help='analysis_id string')
    parser.add_argument('--tool_name', required=True,
                        help='cramtools')

    args = parser.parse_args()
    tool_name = args.tool_name
    uuid = args.uuid

    logger = pipe_util.setup_logging(tool_name, args, uuid)
    engine = pipe_util.setup_db(uuid)
    hostname = os.uname()[1]
    logger.info('hostname=%s' % hostname)

    # Guard clause: bail out early on an unrecognized tool.
    if tool_name != 'index':
        sys.exit('No recognized tool was selected')

    cram_path = pipe_util.get_param(args, 'cram_path')
    reference_fasta_path = pipe_util.get_param(args, 'reference_fasta_path')
    index(uuid, cram_path, reference_fasta_path, engine, logger)
def main():
    """CLI entry point for the Picard SortVcf wrapper.

    Parses arguments, reads DB credentials from the postgres config file,
    connects via postgres.db_connect, and runs picard_sortvcf().

    Fixes over the original:
    - The config file handle was opened with a bare open().read() and never
      closed; it is now a `with` block.
    - The config text was parsed with eval(), which executes arbitrary code.
      ast.literal_eval() parses the same Python-literal dict safely.
    """
    import ast  # local import: only needed for config parsing in this entry point

    parser = argparse.ArgumentParser("picard sortvcf tool")
    # Logging flags.
    parser.add_argument(
        "-d", "--debug",
        action="store_const", const=logging.DEBUG, dest="level",
        help="Enable debug logging.",
    )
    parser.set_defaults(level=logging.INFO)
    # Tool flags
    parser.add_argument("--case_id", required=True, help="case_id string")
    parser.add_argument("--vcf_path", required=False, action="append")
    parser.add_argument("-out", "--output_vcf", required=False, help="Output VCF name")
    parser.add_argument("--reference_fasta_dict", required=False)
    db = parser.add_argument_group("Database parameters")
    db.add_argument("--host", default="172.17.65.79", help="hostname for db")
    db.add_argument("--database", default="prod_bioinfo", help="name of the database")
    db.add_argument("--postgres_config", default=None, help="postgres config file", required=True)

    # setup required parameters
    args = parser.parse_args()
    case_id = args.case_id
    logger = pipe_util.setup_logging("picard_sortvcf", args, case_id)
    hostname = os.uname()[1]
    logger.info("hostname=%s" % hostname)

    # Credentials file is a Python dict literal with at least
    # 'username'/'password' keys (see DATABASE below).
    with open(args.postgres_config, "r") as config_file:
        postgres_config = ast.literal_eval(config_file.read())

    DATABASE = {
        "drivername": "postgres",
        "host": args.host,
        "port": "5432",
        "username": postgres_config["username"],
        "password": postgres_config["password"],
        "database": args.database,
    }
    engine = postgres.db_connect(DATABASE)

    vcf_path = pipe_util.get_param(args, "vcf_path")
    reference_fasta_dict = pipe_util.get_param(args, "reference_fasta_dict")
    output_vcf = pipe_util.get_param(args, "output_vcf")
    picard_sortvcf(case_id, vcf_path, output_vcf, reference_fasta_dict, engine, logger)
def main():
    """CLI entry point for the bamutil docker tool wrapper.

    Only the 'splitbam' tool_name is recognized; anything else exits with an
    error message.

    Fix over the original: removed the unused local `be_lenient` (assigned
    but never read).
    """
    parser = argparse.ArgumentParser('bamutil docker tool')
    # Logging flags.
    parser.add_argument('-d', '--debug',
                        action='store_const', const=logging.DEBUG,
                        dest='level', help='Enable debug logging.')
    parser.set_defaults(level=logging.INFO)
    # Required flags.
    parser.add_argument('--tool_name', required=True,
                        help='bamutil tool')
    parser.add_argument('--uuid', required=True,
                        help='uuid string')
    # Tool flags
    parser.add_argument('--bam_path', required=False)

    # setup required parameters
    args = parser.parse_args()
    tool_name = args.tool_name
    uuid = args.uuid
    logger = pipe_util.setup_logging(tool_name, args, uuid)
    engine = pipe_util.setup_db(uuid)

    if tool_name == 'splitbam':
        bam_path = pipe_util.get_param(args, 'bam_path')
        splitbam(uuid, bam_path, engine, logger)
    else:
        sys.exit('No recognized tool was selected')
    return
def main():
    """CLI entry point for the FastQC wrapper: parse args, set up logging and
    a per-run sqlite engine, then run fastqc."""
    parser = argparse.ArgumentParser('FastQC tool')
    # Logging flags.
    parser.add_argument('-d', '--debug',
                        action='store_const', const=logging.DEBUG,
                        dest='level', help='Enable debug logging.')
    parser.set_defaults(level=logging.INFO)
    # Required flags.
    parser.add_argument('-f', '--fastq_path', required=True)
    parser.add_argument('-u', '--uuid', required=True)
    parser.add_argument('-j', '--thread_count', type=is_nat, required=True)

    # setup required parameters
    args = parser.parse_args()

    tool_name = 'fastqc'
    logger = pipe_util.setup_logging(tool_name, args, args.uuid)

    # One sqlite DB per run, named "<uuid>_fastqc.db".
    sqlite_name = args.uuid + '_' + tool_name + '.db'
    engine = sqlalchemy.create_engine(
        'sqlite:///' + sqlite_name,
        isolation_level='SERIALIZABLE',
    )
    fastqc.fastqc(args.uuid, args.fastq_path, args.thread_count, engine, logger)
    return
def main():
    """CLI entry point for GATK MuTect2 panel-of-normals creation.

    Dispatches on --tool_name:
      * 'mutect2_pon_tool' -> mutect2_pon_tool.pon(...)
      * 'CombineVariants'  -> CombineVariants.combinevcf(...)
    Any other value exits with an error message.

    Fix over the original: the top-level `thread_count = str(args.thread_count)`
    and `Parallel_Block_Size = str(args.Parallel_Block_Size)` conversions were
    dead code — both names were re-fetched via pipe_util.get_param() before any
    use — so they have been removed.
    """
    parser = argparse.ArgumentParser('GATK MuTect2 Panel Of Normal creation')
    # Logging flags.
    parser.add_argument('-d', '--debug',
                        action='store_const', const=logging.DEBUG,
                        dest='level', help='Enable debug logging.')
    parser.set_defaults(level=logging.INFO)
    # Required flags.
    parser.add_argument('-r', '--reference_fasta_path', required=False,
                        help='Reference fasta path.')
    parser.add_argument('-rf', '--reference_fasta_fai', required=False,
                        help='Reference fasta fai path.')
    parser.add_argument('-snp', '--known_snp_vcf_path', required=False,
                        help='Reference SNP path.')
    parser.add_argument('-cos', '--cosmic_path', required=False,
                        help='Reference COSMIC path.')
    parser.add_argument('-b', '--cram_path', required=False, action="append",
                        help='Source cram path.')
    parser.add_argument('-v', '--vcf_path', required=False, action="append",
                        help='Individual VCF path')
    parser.add_argument('-j', '--thread_count', required=False, type=is_nat,
                        help='Maximum number of threads for execution.')
    parser.add_argument('-bs', '--Parallel_Block_Size', type=is_nat,
                        default=50000000, required=False,
                        help='Parallel Block Size')
    parser.add_argument('-u', '--uuid', required=True,
                        help='analysis_id string')
    parser.add_argument('--tool_name', required=True,
                        help='gatk tool')

    args = parser.parse_args()
    tool_name = args.tool_name
    uuid = args.uuid
    logger = pipe_util.setup_logging('gatk_' + tool_name, args, uuid)
    engine = pipe_util.setup_db(uuid)
    hostname = os.uname()[1]
    logger.info('hostname=%s' % hostname)

    if tool_name == 'mutect2_pon_tool':
        # Only the first supplied cram path is used for PON creation.
        cram_path = pipe_util.get_param(args, 'cram_path')[0]
        known_snp_vcf_path = pipe_util.get_param(args, 'known_snp_vcf_path')
        cosmic_path = pipe_util.get_param(args, 'cosmic_path')
        reference_fasta_path = pipe_util.get_param(args, 'reference_fasta_path')
        thread_count = pipe_util.get_param(args, 'thread_count')
        fai_path = pipe_util.get_param(args, 'reference_fasta_fai')
        blocksize = pipe_util.get_param(args, 'Parallel_Block_Size')
        mutect2_pon_tool.pon(uuid, cram_path, thread_count,
                             reference_fasta_path, cosmic_path, fai_path,
                             blocksize, known_snp_vcf_path, engine, logger)
    elif tool_name == 'CombineVariants':
        vcf_path_list = pipe_util.get_param(args, 'vcf_path')
        reference_fasta_path = pipe_util.get_param(args, 'reference_fasta_path')
        # Fetched for parity with the original (get_param may record/log the
        # value), but combinevcf() does not take a thread count.
        thread_count = pipe_util.get_param(args, 'thread_count')
        CombineVariants.combinevcf(uuid, vcf_path_list, reference_fasta_path,
                                   engine, logger)
    else:
        sys.exit('No recognized tool was selected')
def main():
    """CLI entry point for the biobambam docker tool wrapper.

    Parses arguments, creates a per-run sqlite engine named
    "<tool_name>_<uuid>.db", then dispatches to the selected biobambam2
    subcommand. The dispatch tables below group the tools by the call
    signature of their wrapper functions.
    """
    parser = argparse.ArgumentParser('biobambam docker tool')
    # Logging flags.
    parser.add_argument('-d', '--debug',
                        action='store_const', const=logging.DEBUG,
                        dest='level', help='Enable debug logging.')
    parser.set_defaults(level=logging.INFO)
    # Required flags.
    parser.add_argument('--tool_name', required=True,
                        help='biobambam tool')
    parser.add_argument('--uuid', required=True,
                        help='uuid string')
    parser.add_argument('--input_state', required=True)
    # Tool flags
    parser.add_argument('--bam_path', required=False)
    parser.add_argument('--reference_fasta_path', required=False)

    # setup required parameters
    args = parser.parse_args()
    tool_name = args.tool_name
    uuid = args.uuid
    input_state = args.input_state
    logger = pipe_util.setup_logging('biobambam2_' + tool_name, args, uuid)

    sqlite_name = tool_name + '_' + uuid + '.db'
    engine = sqlalchemy.create_engine('sqlite:///' + sqlite_name,
                                      isolation_level='SERIALIZABLE')
    be_lenient = True

    # Tools whose wrappers take (uuid, bam_path, input_state, cpu_count, engine, logger).
    cpu_tools = {
        'bamfixmateinformation': bamfixmateinformation.bamfixmateinformation,
        'bammarkduplicates': bammarkduplicates.bammarkduplicates,
        'bammarkduplicates2': bammarkduplicates2.bammarkduplicates2,
        'bammerge': bammerge.bammerge,
        'bamvalidate': bamvalidate.bamvalidate,
    }
    # Tools whose wrappers take (uuid, bam_path, input_state, engine, logger).
    plain_tools = {
        'bamindex': bamindex.bamindex,
        'bamtofastq': bamtofastq.bamtofastq,
    }
    # Tools whose wrappers additionally need a reference fasta:
    # (uuid, bam_path, reference_fasta_path, input_state, engine, logger).
    ref_tools = {
        'bammdnm': bammdnm.bammdnm,
        'bamsort': bamsort.bamsort,
    }

    if tool_name in cpu_tools:
        bam_path = pipe_util.get_param(args, 'bam_path')
        cpu_tools[tool_name](uuid, bam_path, input_state, cpu_count, engine, logger)
    elif tool_name in plain_tools:
        bam_path = pipe_util.get_param(args, 'bam_path')
        plain_tools[tool_name](uuid, bam_path, input_state, engine, logger)
    elif tool_name in ref_tools:
        bam_path = pipe_util.get_param(args, 'bam_path')
        reference_fasta_path = pipe_util.get_param(args, 'reference_fasta_path')
        ref_tools[tool_name](uuid, bam_path, reference_fasta_path, input_state, engine, logger)
    else:
        sys.exit('No recognized tool was selected')
    return