def test_good(self):
    """No cycle exceeds the error-rate cutoff, so the report stays empty."""
    quality = StringIO("""\
tile,cycle,errorrate
2,1,1.0
2,2,7.49
""")
    # Only the header should come back: 7.49 is just under the threshold.
    expected = """\
tile,cycle,errorrate
"""
    report = StringIO()

    report_bad_cycles(quality, report)

    self.assertEqual(expected, report.getvalue())
def test_filter_following(self):
    """A bad cycle also taints the cycles that follow it on the same tile."""
    quality = StringIO("""\
tile,cycle,errorrate
2,1,7.5
2,2,1.0
""")
    # Cycle 2 is reported even though its own error rate is low,
    # because cycle 1 on the same tile was already bad.
    expected = """\
tile,cycle,errorrate
2,1,7.5
2,2,1.0
"""
    report = StringIO()

    report_bad_cycles(quality, report)

    self.assertEqual(expected, report.getvalue())
def summarize_run(args, json):
    """ Summarize the run data from the InterOp folder.

    Reads the PhiX error metrics, writes quality.csv / bad_cycles.csv to the
    scratch folder and bad_tiles.csv to the QC app-result folder, then adds
    quality and tile metrics to the summary.

    :param args: parsed arguments; only args.data_path is read here.
    :param json: run description; read_length*/index_length*, run_id and
        samples are read here.
    :return: a dictionary with summary values.
    """
    read_lengths = [json.read_length1,
                    json.index_length1,
                    json.index_length2,
                    json.read_length2]
    summary = {}
    interop_path = os.path.join(args.data_path,
                                'input',
                                'runs',
                                json.run_id,
                                'InterOp')
    phix_path = os.path.join(interop_path, 'ErrorMetricsOut.bin')
    quality_path = os.path.join(args.data_path, 'scratch', 'quality.csv')
    bad_cycles_path = os.path.join(args.data_path, 'scratch', 'bad_cycles.csv')
    summary_path = build_app_result_path(args.data_path,
                                         json,
                                         json.samples[0],
                                         suffix='_QC')
    makedirs(summary_path)
    bad_tiles_path = os.path.join(summary_path, 'bad_tiles.csv')
    with open(phix_path, 'rb') as phix, open(quality_path, 'w') as quality:
        records = error_metrics_parser.read_errors(phix)
        error_metrics_parser.write_phix_csv(quality,
                                            records,
                                            read_lengths,
                                            summary)
    # Was mode 'rU': the 'U' flag is deprecated since Python 3.4 and removed
    # in 3.11; plain 'r' already gives universal-newline behaviour.
    with open(quality_path, 'r') as quality, \
            open(bad_cycles_path, 'w') as bad_cycles, \
            open(bad_tiles_path, 'w') as bad_tiles:
        report_bad_cycles(quality, bad_cycles, bad_tiles)
    quality_metrics_path = os.path.join(interop_path, 'QMetricsOut.bin')
    quality_metrics_parser.summarize_quality(quality_metrics_path,
                                             summary,
                                             read_lengths)
    tile_metrics_path = os.path.join(interop_path, 'TileMetricsOut.bin')
    summarize_tiles(tile_metrics_path, summary)
    return summary
def test_tile_count(self):
    """Bad cycles are tallied per tile in the bad-tiles summary file."""
    quality = StringIO("""\
tile,cycle,errorrate
1,1,7.5
1,-1,7.5
2,1,1.0
2,-1,7.5
""")
    # Tile 1 has two bad cycles, tile 2 only one.
    expected_tiles = """\
tile,bad_cycles
1,2
2,1
"""
    cycles_report = StringIO()
    tiles_report = StringIO()

    report_bad_cycles(quality, cycles_report, tiles_report)

    self.assertEqual(expected_tiles, tiles_report.getvalue())
def summarize_run(args, json):
    """ Summarize the run data from the InterOp folder.

    Reads the PhiX error metrics, writes quality.csv / bad_cycles.csv to the
    scratch folder and bad_tiles.csv to the QC app-result folder, then adds
    quality and tile metrics to the summary.

    :param args: parsed arguments; only args.data_path is read here.
    :param json: run description; read_length*/index_length*, run_id and
        samples are read here.
    :return: a dictionary with summary values.
    """
    read_lengths = [
        json.read_length1,
        json.index_length1,
        json.index_length2,
        json.read_length2
    ]
    summary = {}
    interop_path = os.path.join(args.data_path,
                                'input',
                                'runs',
                                json.run_id,
                                'InterOp')
    phix_path = os.path.join(interop_path, 'ErrorMetricsOut.bin')
    quality_path = os.path.join(args.data_path, 'scratch', 'quality.csv')
    bad_cycles_path = os.path.join(args.data_path, 'scratch', 'bad_cycles.csv')
    summary_path = build_app_result_path(args.data_path,
                                         json,
                                         json.samples[0],
                                         suffix='_QC')
    makedirs(summary_path)
    bad_tiles_path = os.path.join(summary_path, 'bad_tiles.csv')
    with open(phix_path, 'rb') as phix, open(quality_path, 'w') as quality:
        records = error_metrics_parser.read_errors(phix)
        error_metrics_parser.write_phix_csv(quality,
                                            records,
                                            read_lengths,
                                            summary)
    # Was mode 'rU': the 'U' flag is deprecated since Python 3.4 and removed
    # in 3.11; plain 'r' already gives universal-newline behaviour.
    with open(quality_path, 'r') as quality, \
            open(bad_cycles_path, 'w') as bad_cycles, \
            open(bad_tiles_path, 'w') as bad_tiles:
        report_bad_cycles(quality, bad_cycles, bad_tiles)
    quality_metrics_path = os.path.join(interop_path, 'QMetricsOut.bin')
    quality_metrics_parser.summarize_quality(quality_metrics_path,
                                             summary,
                                             read_lengths)
    tile_metrics_path = os.path.join(interop_path, 'TileMetricsOut.bin')
    summarize_tiles(tile_metrics_path, summary)
    return summary
def summarize_run(args, run_json):
    """ Summarize the run data from the InterOp folder.

    Writes some summary files.
    :return: a dictionary with summary values.
    """
    read_lengths = [
        run_json.read_length1,
        run_json.index_length1,
        run_json.index_length2,
        run_json.read_length2
    ]
    summary = {}
    # NOTE(review): has_runinfo presumably means the run's InterOp/RunInfo
    # files were delivered — confirm against the run_json producer. Without
    # them, all InterOp parsing is skipped and an empty summary is returned.
    has_error_metrics = run_json.has_runinfo
    if has_error_metrics:
        interop_path = os.path.join(args.data_path,
                                    'input',
                                    'runs',
                                    run_json.run_id,
                                    'InterOp')
        phix_path = os.path.join(interop_path, 'ErrorMetricsOut.bin')
        quality_path = os.path.join(args.data_path, 'scratch', 'quality.csv')
        bad_cycles_path = os.path.join(args.data_path,
                                       'scratch',
                                       'bad_cycles.csv')
        bad_tiles_path = os.path.join(args.qc_path, 'bad_tiles.csv')
        # Parse the PhiX error metrics into a CSV; write_phix_csv also adds
        # values to summary as a side effect.
        with open(phix_path, 'rb') as phix, open(quality_path, 'w') as quality:
            records = error_metrics_parser.read_errors(phix)
            error_metrics_parser.write_phix_csv(quality,
                                                records,
                                                read_lengths,
                                                summary)
        # Re-read the quality CSV to flag bad cycles and tally bad tiles.
        with open(quality_path, 'r') as quality, \
                open(bad_cycles_path, 'w') as bad_cycles, \
                open(bad_tiles_path, 'w') as bad_tiles:
            report_bad_cycles(quality, bad_cycles, bad_tiles)
        quality_metrics_path = os.path.join(interop_path, 'QMetricsOut.bin')
        quality_metrics_parser.summarize_quality(quality_metrics_path,
                                                 summary,
                                                 read_lengths)
        tile_metrics_path = os.path.join(interop_path, 'TileMetricsOut.bin')
        summarize_tiles(tile_metrics_path, summary)
    return summary
def summarize_run(run_info):
    """Summarize the run data from the InterOp folder.

    Writes some summary files.

    :param RunInfo run_info: details of the run
    :return: a dictionary with summary values.
    """
    summary = {}
    sizes = run_info.read_sizes
    if sizes is None:
        # No read sizes recorded, so there is nothing to parse.
        return summary
    lengths = [sizes.read1, sizes.index1, sizes.index2, sizes.read2]
    error_metrics_path = os.path.join(run_info.interop_path,
                                      'ErrorMetricsOut.bin')
    # Convert the binary PhiX error metrics into a quality CSV;
    # write_phix_csv also fills in summary values as a side effect.
    with open(error_metrics_path, 'rb') as error_metrics, \
            open(run_info.quality_csv, 'w') as quality:
        error_records = error_metrics_parser.read_errors(error_metrics)
        error_metrics_parser.write_phix_csv(quality,
                                            error_records,
                                            lengths,
                                            summary)
    # Re-read the quality CSV to flag bad cycles and tally bad tiles.
    with open(run_info.quality_csv) as quality, \
            open(run_info.bad_cycles_csv, 'w') as bad_cycles, \
            open(run_info.bad_tiles_csv, 'w') as bad_tiles:
        report_bad_cycles(quality, bad_cycles, bad_tiles)
    quality_metrics_parser.summarize_quality(
        os.path.join(run_info.interop_path, 'QMetricsOut.bin'),
        summary,
        lengths)
    summarize_tiles(os.path.join(run_info.interop_path, 'TileMetricsOut.bin'),
                    summary)
    return summary
def summarize_run(run_info):
    """Summarize the run data from the InterOp folder.

    Writes some summary files.

    :param RunInfo run_info: details of the run
    :return: a dictionary with summary values.
    """
    summary = {}
    if run_info.read_sizes is not None:
        # Read/index lengths in the order the cycles appear on the machine.
        read_lengths = [getattr(run_info.read_sizes, field)
                        for field in ('read1', 'index1', 'index2', 'read2')]
        phix_metrics = os.path.join(run_info.interop_path,
                                    'ErrorMetricsOut.bin')
        # Convert the binary PhiX error metrics into a quality CSV;
        # write_phix_csv also fills in summary values as a side effect.
        with open(phix_metrics, 'rb') as phix_file, \
                open(run_info.quality_csv, 'w') as quality_file:
            error_rows = error_metrics_parser.read_errors(phix_file)
            error_metrics_parser.write_phix_csv(quality_file,
                                                error_rows,
                                                read_lengths,
                                                summary)
        # Re-read the quality CSV to flag bad cycles and tally bad tiles.
        with open(run_info.quality_csv) as quality_file, \
                open(run_info.bad_cycles_csv, 'w') as bad_cycles_file, \
                open(run_info.bad_tiles_csv, 'w') as bad_tiles_file:
            report_bad_cycles(quality_file, bad_cycles_file, bad_tiles_file)
        q_metrics = os.path.join(run_info.interop_path, 'QMetricsOut.bin')
        quality_metrics_parser.summarize_quality(q_metrics,
                                                 summary,
                                                 read_lengths)
        tile_metrics = os.path.join(run_info.interop_path,
                                    'TileMetricsOut.bin')
        summarize_tiles(tile_metrics, summary)
    return summary