def test_flash(pipeline_dir):
    """Run the pipeline with FLASH as merge program and parse its log output."""
    with chdir(pipeline_dir):
        modify_configuration(settings=[("merge_program", "flash")])
        run_snakemake(targets=["stats/reads.json"])
        # The log file is only written when FLASH actually ran
        assert (pipeline_dir / "reads/2-flash.log").exists()
def filtered_tab_session(tmp_path_factory):
    """Generate iteration-01/filtered.tab.gz"""
    base = tmp_path_factory.mktemp("pipedir")
    pipeline_dir = base / "pipedir"
    init_testdata(pipeline_dir)
    with chdir(pipeline_dir):
        run_snakemake(targets=["iteration-01/filtered.tab.gz"])
    return pipeline_dir
def test_primers(pipeline_dir):
    """Test whether specifying primer sequences leads to a SyntaxError."""
    primer_settings = [
        ("forward_primers", "['CGTGA']"),
        ("reverse_primers", "['TTCAC']"),
    ]
    with chdir(pipeline_dir):
        modify_configuration(settings=primer_settings)
        run_snakemake(dryrun=True)
def test_snakemake_final(has_filtered_tab):
    """The 'nofinal' target must stop before the final/ outputs; a full run creates them."""
    with chdir(has_filtered_tab):
        run_snakemake(targets=["nofinal"])
    assert (has_filtered_tab / "iteration-01/new_V_germline.tab").exists()
    # final/ outputs must not exist yet after the 'nofinal' run
    assert not (has_filtered_tab / "final/assigned.tab.gz").exists()
    with chdir(has_filtered_tab):
        run_snakemake()
    assert (has_filtered_tab / "final/assigned.tab.gz").exists()
def test_fastq_input(has_filtered_tab, tmp_path):
    """Use merged reads from an already-run pipeline as single-end FASTQ input for a new run."""
    merged_reads = has_filtered_tab / "reads" / "2-merged.fastq.gz"
    target_dir = tmp_path / "singleend-fastq"
    run_init(
        database="testdata/database",
        single_reads=str(merged_reads),
        directory=str(target_dir),
    )
    with chdir(target_dir):
        modify_configuration([("barcode_length_3prime", "21")])
        run_snakemake(targets=["stats/reads.json"])
def test_fasta_input(has_filtered_tab, tmp_path):
    """Like the FASTQ variant, but feed single-end FASTA reads into a new run."""
    merged_reads = has_filtered_tab / "reads" / "2-merged.fastq.gz"
    fasta_path = tmp_path / "justfasta.fasta"
    convert_fastq_to_fasta(merged_reads, fasta_path)
    target_dir = tmp_path / "singleend-fasta"
    run_init(
        database="testdata/database",
        single_reads=str(fasta_path),
        directory=str(target_dir),
    )
    with chdir(target_dir):
        modify_configuration([("barcode_length_3prime", "21")])
        run_snakemake(targets=["stats/reads.json"])
def test_only_forward_primer(pipeline_dir):
    """Regression test for issue #107 (broken symlink when only a forward
    primer is configured)."""
    with chdir(pipeline_dir):
        modify_configuration(settings=[("forward_primers", "['CGTGA']")])
        # Create some dummy files so we don’t need to run irrelevant steps of the pipeline
        reads_dir = Path("reads")
        reads_dir.mkdir()
        # xopen produces a valid (empty) gzip file for the .gz suffix,
        # which a plain touch() would not
        with xopen(reads_dir / "2-merged.fastq.gz", "w"):
            pass
        stats_dir = Path("stats")
        stats_dir.mkdir()
        (stats_dir / "merging-successful").touch()
        (stats_dir / "reads.json").write_text('{"total": 0}')
        (stats_dir / "trimmed.json").write_text('{"trimmed": 0}')
        run_snakemake(targets=["reads/sequences.fasta.gz"])
def test_snakemake_exact_tab(has_filtered_tab):
    """Building iteration-01/exact.tab must create the file."""
    target = "iteration-01/exact.tab"
    with chdir(has_filtered_tab):
        run_snakemake(targets=[target])
    assert (has_filtered_tab / target).exists()
def test_dryrun(pipeline_dir):
    """A dry run over the default targets must succeed without errors."""
    with chdir(pipeline_dir):
        run_snakemake(dryrun=True)