def run(self):
    """Import reads and update the database while holding the pipeline lock.

    The lock file is acquired before the import starts and released on
    both the success and failure paths, so a failed run does not leave a
    stale lock behind.

    Raises:
        Exception: if importing reads or updating the database fails.
    """
    lock = lock_file.LockFile(self.lock_file)
    try:
        self._import_reads_and_update_db()
    except Exception as err:
        # Catch Exception (not a bare except) so KeyboardInterrupt and
        # SystemExit are not swallowed and rewrapped. Release the lock
        # before propagating, and chain the original cause.
        lock.stop()
        raise Exception("Error importing reads or updating database") from err
    lock.stop()
def test_init_and_stop(self):
    '''test init and stop'''
    lock_path = 'test.lock_file'

    # Constructing a LockFile on a path that already exists must raise.
    utils.make_empty_file(lock_path)
    with self.assertRaises(lock_file.Error):
        lock_file.LockFile(lock_path)
    os.unlink(lock_path)

    # A fresh lock creates its file; deleting that file out from under
    # the lock makes stop() raise.
    lock = lock_file.LockFile(lock_path)
    self.assertTrue(os.path.exists(lock_path))
    os.unlink(lock_path)
    with self.assertRaises(lock_file.Error):
        lock.stop()

    # Normal lifecycle: init creates the file, stop() removes it.
    lock = lock_file.LockFile(lock_path)
    self.assertTrue(os.path.exists(lock_path))
    lock.stop()
    self.assertFalse(os.path.exists(lock_path))
def test_init_and_stop(self): """test init and stop""" tmp_file = "test.lock_file" utils.make_empty_file(tmp_file) with self.assertRaises(Exception): lock_file.LockFile(tmp_file) os.unlink(tmp_file) lock = lock_file.LockFile(tmp_file) self.assertTrue(os.path.exists(tmp_file)) os.unlink(tmp_file) with self.assertRaises(Exception): lock.stop() lock = lock_file.LockFile(tmp_file) self.assertTrue(os.path.exists(tmp_file)) lock.stop() self.assertFalse(os.path.exists(tmp_file))
def run(options):
    """Write the generic pipeline jobs TSV, guarded by a pipeline lock.

    Acquires generic_pipeline.lock in the pipeline root so concurrent
    invocations cannot run at the same time, writes the jobs TSV from
    the database, then releases the lock.
    """
    lock = lock_file.LockFile(
        os.path.join(options.pipeline_root, "generic_pipeline.lock")
    )
    try:
        database = db.Db(options.db_config_file)
        database.make_generic_pipeline_jobs_tsv(
            options.outfile,
            options.pipeline_root,
            options.pipeline_name,
            pipeline_version=options.pipeline_version,
            dataset_name=options.dataset_name,
        )
        database.commit_and_close()
    finally:
        # Always release the lock, even if the database work fails, so
        # an error does not leave a stale lock file behind.
        lock.stop()
def run(options):
    """Write the QC jobs TSV, guarded by a pipeline lock.

    Acquires qc.lock in the pipeline root so concurrent invocations
    cannot run at the same time, writes the QC jobs TSV from the
    database, then releases the lock.
    """
    lock = lock_file.LockFile(os.path.join(options.pipeline_root, 'qc.lock'))
    try:
        database = db.Db(options.db_config_file)
        database.make_qc_jobs_tsv(
            options.outfile,
            options.pipeline_root,
            options.reference_id,
            options.reference_root,
            pipeline_version=options.pipeline_version,
            dataset_name=options.dataset_name,
        )
        database.commit_and_close()
    finally:
        # Always release the lock, even if the database work fails, so
        # an error does not leave a stale lock file behind.
        lock.stop()
def run(options):
    """Write the remove_contam jobs TSV, guarded by a pipeline lock.

    Acquires remove_contam.lock in the pipeline root so concurrent
    invocations cannot run at the same time, writes the jobs TSV from
    the database, then releases the lock.
    """
    lock = lock_file.LockFile(
        os.path.join(options.pipeline_root, "remove_contam.lock")
    )
    try:
        database = db.Db(options.db_config_file)
        database.make_remove_contam_jobs_tsv(
            options.outfile,
            options.pipeline_root,
            options.reference_id,
            options.reference_root,
            dataset_name=options.dataset_name,
        )
        database.commit_and_close()
    finally:
        # Always release the lock, even if the database work fails, so
        # an error does not leave a stale lock file behind.
        lock.stop()
def run(options):
    """Write a faked remove_contam jobs TSV, guarded by a pipeline lock.

    Same as the real remove_contam job generator, but passes faking_it=True
    with a placeholder reference id/path instead of a real reference.
    The lock is released whether the database work succeeds or fails.
    """
    lock = lock_file.LockFile(
        os.path.join(options.pipeline_root, 'remove_contam.lock')
    )
    try:
        database = db.Db(options.db_config_file)
        database.make_remove_contam_jobs_tsv(
            options.outfile,
            options.pipeline_root,
            0,
            '/fake/path/to/refs/',
            dataset_name=options.dataset_name,
            faking_it=True,
        )
        database.commit_and_close()
    finally:
        # Always release the lock, even if the database work fails, so
        # an error does not leave a stale lock file behind.
        lock.stop()
def run(options):
    """Write the mykrobe_predict jobs TSV, guarded by a pipeline lock.

    Acquires mykrobe_predict.lock in the pipeline root so concurrent
    invocations cannot run at the same time, writes the jobs TSV from
    the database, then releases the lock.
    """
    lock = lock_file.LockFile(
        os.path.join(options.pipeline_root, 'mykrobe_predict.lock')
    )
    try:
        database = db.Db(options.db_config_file)
        database.make_variant_call_or_mykrobe_jobs_tsv(
            'mykrobe_predict',
            options.outfile,
            options.pipeline_root,
            options.reference_id,
            options.reference_root,
            pipeline_version=options.pipeline_version,
            dataset_name=options.dataset_name,
        )
        database.commit_and_close()
    finally:
        # Always release the lock, even if the database work fails, so
        # an error does not leave a stale lock file behind.
        lock.stop()
def run(options):
    """Build a reference directory, optionally registering it in the database.

    If --db_config_file, --pipeline_references_root and --name are all
    given, the reference is first added to the database (under
    add_reference.lock) and its new id is used to place the directory
    under the references root; otherwise --outdir is used and the
    database is untouched. Exits with status 1 if both modes are
    requested at once.
    """
    using_db = None not in (
        options.db_config_file,
        options.pipeline_references_root,
        options.name,
    )
    # The two modes are mutually exclusive: either register in the
    # database, or write to an explicit output directory.
    if using_db and options.outdir:
        print(
            "Error! If adding to database, must use --db_config_file,--pipeline_references_root,--name.",
            file=sys.stderr,
        )
        print("Otherwise, use --outdir.", file=sys.stderr)
        sys.exit(1)

    if using_db:
        lock = lock_file.LockFile(
            os.path.join(options.pipeline_references_root, "add_reference.lock")
        )
        try:
            database = db.Db(options.db_config_file)
            ref_id = database.add_reference(options.name)
            database.commit_and_close()
        finally:
            # Release the lock even if the database update fails, so an
            # error does not leave a stale lock file behind.
            lock.stop()
    else:
        ref_id = None

    ref_dir = reference_dir.ReferenceDir(
        pipeline_references_root_dir=options.pipeline_references_root,
        reference_id=ref_id,
        directory=options.outdir,
    )
    # Presence of a contam TSV flips both flags: the genome is treated
    # as big and cortex indexing is skipped.
    genome_is_big = options.contam_tsv is not None
    using_cortex = options.contam_tsv is None
    ref_dir.make_index_files(
        options.fasta_file,
        genome_is_big,
        using_cortex,
        cortex_mem_height=options.cortex_mem_height,
    )
    if options.contam_tsv is not None:
        ref_dir.add_remove_contam_metadata_tsv(options.contam_tsv)