def compress_files(*args):
    return utils.compress_files(*args)
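# utils.compress_files is not defined in this module. As a rough sketch of
# what such a helper could look like, assuming (from the call sites in
# run_main below) that it gzips each file in place and returns the total byte
# counts before and after compression. The name _compress_files_sketch and
# all details here are hypothetical, not the actual utils implementation.
import gzip
import os
import shutil

def _compress_files_sketch(paths):
    """Hypothetical stand-in for utils.compress_files (assumed behavior)."""
    before = after = 0
    for path in paths:
        # Skip missing files and files that already look compressed.
        if not path or not os.path.isfile(path) or path.endswith(".gz"):
            continue
        before += os.path.getsize(path)
        gz_path = path + ".gz"
        with open(path, "rb") as src, gzip.open(gz_path, "wb") as dst:
            shutil.copyfileobj(src, dst)
        os.remove(path)
        after += os.path.getsize(gz_path)
    return before, after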
def run_main(config, config_file, fc_dir, work_dir, run_info_yaml):
    _record_sw_versions(config, os.path.join(work_dir, "bcbb_software_versions.txt"))
    prog = utils.RecordProgress(work_dir)
    to_compress = set()
    prog.progress("analysis_start")

    # Resolve directories and the local copy of the configuration file.
    align_dir = os.path.join(work_dir, "alignments")
    run_module = "bcbio.distributed"
    fc_name, fc_date, run_info = get_run_info(fc_dir, config, run_info_yaml)
    fastq_dir, galaxy_dir, config_dir = _get_full_paths(get_fastq_dir(fc_dir),
                                                        config, config_file)
    config_file = os.path.join(config_dir, os.path.basename(config_file))
    dirs = {"fastq": fastq_dir, "galaxy": galaxy_dir, "align": align_dir,
            "work": work_dir, "flowcell": fc_dir, "config": config_dir}
    run_parallel = parallel_runner(run_module, dirs, config, config_file)

    run_items = add_multiplex_across_lanes(run_info["details"], dirs["fastq"], fc_name)
    lanes = ((info, fc_name, fc_date, dirs, config) for info in run_items)
    lane_items = run_parallel("process_lane", lanes)
    to_compress.update(lane_items[0][0:2])
    prog.progress("process_lane")

    # Upload the sequencing report to Google Docs. Skipped for now; we rely
    # on an external mechanism for uploading this data.
    #gdocs_indicator = os.path.join(work_dir, "gdocs_report_complete.txt")
    #if not os.path.exists(gdocs_indicator) \
    #and queue_report(fc_date, fc_name, os.path.abspath(run_info_yaml), dirs, config, config_file):
    #    utils.touch_file(gdocs_indicator)

    # Remove spiked-in controls, contaminants etc.
    lane_items = run_parallel("remove_contaminants", lane_items)
    to_compress.update(lane_items[0][0:2])
    prog.progress("remove_contaminants")

    align_items = run_parallel("process_alignment", lane_items)
    to_compress.update(align_items[0]["fastq"])
    prog.progress("process_alignment")

    # Process samples, potentially multiplexed across multiple lanes.
    samples = organize_samples(align_items, dirs, config_file)
    samples = run_parallel("merge_sample", samples)
    to_compress.add(samples[0][0]["fastq1"])
    to_compress.add(samples[0][0]["fastq2"])
    prog.progress("merge_sample")

    samples = run_parallel("mark_duplicates_sample", samples)
    to_compress.add(samples[0][0]["fastq1"])
    to_compress.add(samples[0][0]["fastq2"])
    prog.progress("mark_duplicates_sample")

    run_parallel("screen_sample_contaminants", samples)
    prog.progress("screen_sample_contaminants")
    samples = run_parallel("recalibrate_sample", samples)
    prog.progress("recalibrate_sample")
    samples = parallel_realign_sample(samples, run_parallel)
    prog.progress("realign_sample")
    samples = parallel_variantcall(samples, run_parallel)
    prog.progress("variantcall")
    samples = run_parallel("detect_sv", samples)
    prog.progress("detect_sv")
    samples = run_parallel("process_sample", samples)
    prog.progress("process_sample")
    samples = run_parallel("generate_bigwig", samples, {"programs": ["ucsc_bigwig"]})
    prog.progress("generate_bigwig")

    write_project_summary(samples)
    write_metrics(run_info, fc_name, fc_date, dirs)
    prog.progress("write_metrics")

    # Write statusdb metrics. Skipped for now; we rely on an external
    # mechanism for uploading this data.
    #report_to_statusdb(fc_name, fc_date, run_info_yaml, dirs, config)

    # Compress all files collected in to_compress.
    if config["algorithm"].get("compress_files", True):
        before, after = utils.compress_files(to_compress)
        logger.info("Space used by the files before compressing (in bytes): "
                    + str(before))
        logger.info("Space used by the files after compressing (in bytes): "
                    + str(after))
        logger.info("Saved space (in bytes): " + str(before - after))
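# Neither utils.RecordProgress (used above) nor the bare RecordProgress (used
# in the variant below) is defined in this section. A minimal sketch
# compatible with both call patterns might append timestamped step names to a
# log file in the work directory; the class name, file name and format are
# assumptions, not the actual implementation.
import datetime

class RecordProgressSketch(object):
    """Hypothetical progress recorder matching prog.progress()/prog.dummy()."""

    def __init__(self, work_dir):
        # Assumed log location; the real class may store progress differently.
        self._path = os.path.join(work_dir, "progress.log")

    def progress(self, step):
        # Record that a pipeline step finished, with a timestamp.
        with open(self._path, "a") as handle:
            handle.write("%s\t%s\n" % (datetime.datetime.now().isoformat(), step))

    def dummy(self):
        # No-op placeholder mirroring the extra prog.dummy() calls below.
        pass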
def run_main(config, config_file, fc_dir, work_dir, run_info_yaml):
    _record_sw_versions(config, os.path.join(work_dir, "bcbb_software_versions.txt"))
    prog = RecordProgress(work_dir)
    to_compress = set()
    prog.progress("analysis_start")

    align_dir = os.path.join(work_dir, "alignments")
    run_module = "bcbio.distributed"
    fc_name, fc_date, run_info = get_run_info(fc_dir, config, run_info_yaml)
    fastq_dir, galaxy_dir, config_dir = _get_full_paths(get_fastq_dir(fc_dir),
                                                        config, config_file)
    config_file = os.path.join(config_dir, os.path.basename(config_file))
    dirs = {
        "fastq": fastq_dir,
        "galaxy": galaxy_dir,
        "align": align_dir,
        "work": work_dir,
        "flowcell": fc_dir,
        "config": config_dir,
    }
    run_parallel = parallel_runner(run_module, dirs, config, config_file)

    run_items = add_multiplex_across_lanes(run_info["details"], dirs["fastq"], fc_name)
    lanes = ((info, fc_name, fc_date, dirs, config) for info in run_items)
    lane_items = run_parallel("process_lane", lanes)
    to_compress.update(lane_items[0][0:2])
    prog.dummy()
    prog.progress("process_lane")

    # Remove spiked-in controls, contaminants etc.
    lane_items = run_parallel("remove_contaminants", lane_items)
    to_compress.update(lane_items[0][0:2])
    prog.dummy()
    prog.progress("remove_contaminants")

    align_items = run_parallel("process_alignment", lane_items)
    to_compress.update(align_items[0]["fastq"])
    prog.dummy()
    prog.progress("process_alignment")

    # Process samples, potentially multiplexed across multiple lanes.
    samples = organize_samples(align_items, dirs, config_file)
    samples = run_parallel("merge_sample", samples)
    to_compress.add(samples[0][0]["fastq1"])
    to_compress.add(samples[0][0]["fastq2"])
    prog.dummy()
    prog.progress("merge_sample")

    samples = run_parallel("mark_duplicates_sample", samples)
    to_compress.add(samples[0][0]["fastq1"])
    to_compress.add(samples[0][0]["fastq2"])
    prog.dummy()
    prog.progress("mark_duplicates_sample")

    run_parallel("screen_sample_contaminants", samples)
    prog.dummy()
    prog.progress("screen_sample_contaminants")
    samples = run_parallel("recalibrate_sample", samples)
    prog.dummy()
    prog.progress("recalibrate_sample")
    samples = parallel_realign_sample(samples, run_parallel)
    prog.dummy()
    prog.progress("realign_sample")
    samples = parallel_variantcall(samples, run_parallel)
    prog.dummy()
    prog.progress("variantcall")
    samples = run_parallel("detect_sv", samples)
    prog.dummy()
    prog.progress("detect_sv")
    samples = run_parallel("process_sample", samples)
    prog.dummy()
    prog.progress("process_sample")
    samples = run_parallel("generate_bigwig", samples, {"programs": ["ucsc_bigwig"]})
    prog.dummy()
    prog.progress("generate_bigwig")

    write_project_summary(samples)
    write_metrics(run_info, fc_name, fc_date, dirs)
    prog.dummy()
    prog.progress("write_metrics")

    # Compress all files collected in to_compress.
    if config["algorithm"].get("compress_files", True):
        before, after = utils.compress_files(to_compress)
        logger.info("Space used by the files before compressing (in bytes): "
                    + str(before))
        logger.info("Space used by the files after compressing (in bytes): "
                    + str(after))
        logger.info("Saved space (in bytes): " + str(before - after))
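# parallel_runner is also external to this section. To illustrate the
# dispatch pattern run_main relies on (a callable that looks a task up by
# name and maps it over work items), here is a minimal serial stand-in. It
# assumes each task name is a function in the given module that takes one
# item's arguments and returns a list of results; the third argument
# (metadata such as {"programs": ["ucsc_bigwig"]}) is accepted but ignored
# here. All names and behavior are assumptions, not the real
# bcbio.distributed API.
import importlib

def _serial_runner_sketch(module_name, dirs, config, config_file):
    module = importlib.import_module(module_name)

    def run_parallel(fn_name, items, extras=None):
        # Resolve the task function by name and apply it to each work item,
        # flattening the per-item result lists as run_main expects.
        fn = getattr(module, fn_name)
        results = []
        for args in items:
            out = fn(*args)
            if out is not None:
                results.extend(out)
        return results

    return run_parallel

# Hypothetical usage:
#   run = _serial_runner_sketch("bcbio.distributed", dirs, config, config_file)
#   lane_items = run("process_lane", lanes)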