def test_logger_from_conf(self):
    """Verify that create_logger_from_conf honours the log file name, the log level and the
    file-handler mode for both configuration flavours (LoggingConfiguration and
    MikadoConfiguration), in both streaming (no file) and file-backed setups."""
    with tempfile.TemporaryDirectory() as folder:
        combinations = itertools.product(
            ["a", "w"],
            ["WARNING", "INFO", "DEBUG"],
            (LoggingConfiguration, MikadoConfiguration),
            (False, True))
        for counter, (mode, level, initialiser, streaming) in enumerate(combinations):
            # The temporary file is used only to reserve a unique name: it is removed
            # when the context exits, and the logger re-creates it when not streaming.
            with tempfile.NamedTemporaryFile(mode="wt", dir=folder, suffix=".log") as log:
                name = None if streaming is True else log.name
            if initialiser == LoggingConfiguration:
                conf = LoggingConfiguration()
                conf.log = name
                conf.log_level = level
            else:
                conf = MikadoConfiguration()
                conf.log_settings.log = name
                conf.log_settings.log_level = level
            logger = create_logger_from_conf(
                conf, name="test_logger_from_conf" + str(counter), mode=mode)
            self.assertTrue(os.path.exists(log.name) or streaming)
            # Either we have created a streaming log or not
            self.assertTrue(os.path.exists(log.name) != streaming)
            self.assertEqual(logging.getLevelName(logger.level), level, conf)
            handler = logger.handlers[0]
            if streaming:
                self.assertIsInstance(handler, logging.StreamHandler)
            else:
                self.assertIsInstance(handler, logging.FileHandler)
                self.assertEqual(handler.mode, mode)
            if streaming is False and os.path.exists(log.name):
                os.remove(log.name)
def test_asn(self):
    """Load gzipped BLAST ASN archives for both target sets and both BLAST flavours.

    Currently DISABLED because the ASN specification requires the database to be where
    indicated by the relative path within the ASN. So for the time being this test is
    *not active*.
    """
    asns = [
        os.path.join("blast", "asn", "blast.asn.gz"),
        os.path.join("blast_parse_seqids", "asn", "blast.asn.gz")
    ]
    for folder in ["sanitised", "uniprot"]:
        targets = os.path.join(self.master, folder, "uniprot.fasta")
        self.assertTrue(os.path.exists(targets))
        for asn in asns:
            asn_path = os.path.join(self.master, folder, asn)
            self.assertTrue(os.path.exists(asn_path))
            with tempfile.TemporaryDirectory() as out_folder:
                # BUGFIX: the configuration must be created afresh for each run.
                # Previously a single MikadoConfiguration was shared across the inner
                # loop, so after the first iteration conf.db_settings.db held an
                # *absolute* path; os.path.join(out_folder, <absolute path>) then
                # discarded out_folder and pointed the database at the previous,
                # already-deleted temporary directory. This also matches the
                # per-directory setup used by test_daa/test_xml/test_tsv.
                conf = MikadoConfiguration()
                conf.serialise.files.output_dir = out_folder
                conf.serialise.files.blast_targets = [targets]
                conf.serialise.files.transcripts = self.queries
                conf.serialise.files.xml = [asn_path]
                out_db = os.path.join(out_folder, conf.db_settings.db)
                conf.db_settings.db = out_db
                # Sanity check, consistent with the sibling tests: the database must
                # live inside the temporary area.
                self.assertTrue(tempfile.gettempdir() in out_db)
                logger = create_default_logger(f"test_asn_{folder}")
                self.run_loading(asn_path, out_db, logger, conf)
def setUp(self) -> None:
    """Create a fresh configuration and the per-assembly input descriptors used by the tests."""
    self.config = MikadoConfiguration()
    self.ref = pkg_resources.resource_filename("Mikado.tests", "reference.gff3")
    self.trinity = pkg_resources.resource_filename("Mikado.tests", "trinity.bed12")
    self.pacbio = pkg_resources.resource_filename("Mikado.tests", "pacbio.bam")
    # Per-label settings: (filename, stranded, score, exclude_redundant, strip_cds, reference).
    # skip_split is True for every assembly.
    settings = {
        "ref": (self.ref, True, 10, False, False, True),
        "pacbio": (self.pacbio, True, 5, True, False, False),
        "trinity": (self.trinity, False, -1, True, True, False),
    }
    self.files = {
        label: {
            "filename": fname, "label": label, "stranded": stranded, "score": score,
            "exclude_redundant": exclude, "strip_cds": strip, "skip_split": True,
            "reference": reference,
        }
        for label, (fname, stranded, score, exclude, strip, reference) in settings.items()
    }
def test_daa(self):
    """Load DIAMOND .daa archives for both the sanitised and the uniprot target sets."""
    daa_relative = os.path.join("diamond", "daa", "blast.daa")
    for folder in ["sanitised", "uniprot"]:
        target_fasta = os.path.join(self.master, folder, "uniprot.fasta")
        self.assertTrue(os.path.exists(target_fasta))
        daa_file = os.path.join(self.master, folder, daa_relative)
        self.assertTrue(os.path.exists(daa_file))
        with tempfile.TemporaryDirectory() as out_folder:
            conf = MikadoConfiguration()
            # Anchor the database inside the temporary directory.
            out_db = os.path.join(out_folder, conf.db_settings.db)
            conf.db_settings.db = out_db
            conf.serialise.files.output_dir = out_folder
            conf.serialise.files.blast_targets = [target_fasta]
            conf.serialise.files.transcripts = self.queries
            conf.serialise.files.xml = [daa_file]
            self.assertTrue(tempfile.gettempdir() in out_db)
            logger = create_default_logger(f"test_daa_{folder}")
            self.run_loading(daa_file, out_db, logger, conf)
def test_xml(self):
    """Load gzipped BLAST XML output produced by blast, blast_parse_seqids and diamond,
    against both the sanitised and the uniprot target sets."""
    xml_relative = os.path.join("xml", "blast.xml.gz")
    for folder in ["sanitised", "uniprot"]:
        for subfolder in ["blast", "blast_parse_seqids", "diamond"]:
            target_fasta = os.path.join(self.master, folder, "uniprot.fasta")
            self.assertTrue(os.path.exists(target_fasta))
            xml_file = os.path.join(self.master, folder, subfolder, xml_relative)
            self.assertTrue(os.path.exists(xml_file))
            with tempfile.TemporaryDirectory() as out_folder:
                conf = MikadoConfiguration()
                conf.serialise.files.output_dir = out_folder
                # Anchor the database inside the temporary directory.
                out_db = os.path.join(out_folder, conf.db_settings.db)
                conf.db_settings.db = out_db
                conf.serialise.files.blast_targets = [target_fasta]
                conf.serialise.files.transcripts = self.queries
                conf.serialise.files.xml = [xml_file]
                self.assertTrue(tempfile.gettempdir() in out_db)
                logger = create_default_logger(f"test_xml_{folder}_{subfolder}")
                self.run_loading(xml_file, out_db, logger, conf)
def test_tsv(self):
    """Load gzipped tabular (TSV) BLAST output produced by blast, blast_parse_seqids and
    diamond, against both the uniprot and the sanitised target sets."""
    tsv_relative = os.path.join("tsv", "blast.tsv.gz")
    for folder in ["uniprot", "sanitised"]:
        for subfolder in ["blast", "blast_parse_seqids", "diamond"]:
            target_fasta = os.path.join(self.master, folder, "uniprot.fasta")
            self.assertTrue(os.path.exists(target_fasta))
            tsv_file = os.path.join(self.master, folder, subfolder, tsv_relative)
            self.assertTrue(os.path.exists(tsv_file))
            with tempfile.TemporaryDirectory() as out_folder:
                conf = MikadoConfiguration()
                conf.serialise.files.output_dir = out_folder
                conf.serialise.files.blast_targets = [target_fasta]
                conf.serialise.files.transcripts = self.queries
                conf.serialise.files.xml = [tsv_file]
                # Anchor the database inside the temporary directory.
                out_db = os.path.join(out_folder, conf.db_settings.db)
                conf.db_settings.db = out_db
                self.assertTrue(tempfile.gettempdir() in out_db)
                # DEBUG level on purpose: the TSV path is the most verbose loader.
                logger = create_default_logger(f"test_tsv_{folder}_{subfolder}", level="DEBUG")
                self.run_loading(tsv_file, out_db, logger, conf)