                os.remove(x)
            except OSError:
                # Ignore files that are already gone.
                pass

    def complete(self):
        exists = any(os.path.exists(x) for x in self.unglob)
        return self.clone_parent().complete() and not exists

    def output(self):
        return self.input()


if __name__ == '__main__':
    multiprocessing.set_start_method('forkserver')
    logger, alloc_log = utils.logging_init(
        log_dir=os.path.join(os.getcwd(), 'logs'),
        pipeline_name=os.path.basename(__file__))

    with open(sys.argv[1], 'r') as libs_file:
        lib_list = [line.rstrip() for line in libs_file]

    # Use the library-list file's basename (without extension) as the output prefix.
    name = os.path.split(sys.argv[1])[1].split('.', 1)[0]

    luigi.run(['CleanUpCallset',
               '--output-prefix', name,
               '--lib-list', json.dumps(lib_list),
               '--star-genome', os.path.join(utils.reference_dir, 'genome'),
               '--reference', os.path.join(utils.reference_dir,
                                           'PST130_contigs.fasta'),
               '--mask', os.path.join(utils.reference_dir,
                                      'PST130_RNASeq_collapsed_exons.bed')
               ] + sys.argv[3:])
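# Example invocation (a sketch; the script name and library-list filename are
# assumptions, and sys.argv[2]'s role is not shown in this excerpt, so it is
# left as a placeholder; everything from the third argument onward goes
# straight through to luigi.run):
#
#     python cleanup_callset_pipeline.py libs.txt <argv2> --workers 4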
    # This is a bit of a hack: it allows us to pass parameters to
    # LibraryBatchWrapper and have them propagate down to all calls to
    # PerLibPipeline.
    library = None

    def requires(self):
        return [self.clone_parent(library=lib.rstrip())
                for lib in self.lib_list]

    def output(self):
        return self.input()


# ----------------------------------------------------------------------- #

if __name__ == '__main__':
    multiprocessing.set_start_method('forkserver')
    logger, alloc_log = utils.logging_init(
        log_dir=os.path.join(os.getcwd(), 'logs'),
        pipeline_name=PIPELINE)

    with open(sys.argv[1], 'r') as libs_file:
        # One library name per line.
        lib_list = [line.rstrip() for line in libs_file]

    luigi.run(['LibraryBatchWrapper',
               '--lib-list', json.dumps(lib_list),
               '--star-genome', os.path.join(utils.reference_dir, 'genome'),
               '--reference', os.path.join(utils.reference_dir,
                                           'PST130_contigs.fasta')
               ] + sys.argv[2:])
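# clone_parent() is not defined in this excerpt; it is presumably the method
# that luigi.util's @inherits decorator attaches to a task, which is what lets
# LibraryBatchWrapper forward its parameters down to PerLibPipeline.
#
# Example invocation (a sketch; the script name and library-list filename are
# assumptions, and any extra arguments such as --workers go straight through
# to luigi.run):
#
#     python batch_pipeline.py libs.txt --workers 4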