def test_spot_descriptor_injection(self):
    sra.export(self._inv_obj, self._tmp_dir,
               sra_settings=self._sra_default_config)
    with open(os.path.join(self._tmp_dir, 'experiment_set.xml'), 'rb') as out_fp:
        self.assertTrue('<SPOT_DESCRIPTOR>' in str(out_fp.read()))
def convert(json_fp, path, config_dir=None, sra_settings=None,
            datafilehashes=None, validate_first=True):
    """Converter for ISA-JSON to SRA.

    :param json_fp: File pointer to ISA JSON input
    :param path: Directory for output SRA XMLs to be written
    :param config_dir: Path to JSON configuration. If None, uses the default
        embedded in the API
    :param sra_settings: SRA settings dict
    :param datafilehashes: Data files with hashes, in a dict
    :param validate_first: Whether to validate the ISA JSON before converting
    """
    if validate_first:
        log.info("Validating input JSON before conversion")
        report = isajson.validate(fp=json_fp, config_dir=config_dir,
                                  log_level=logging.ERROR)
        if len(report.get('errors')) > 0:
            log.fatal("Could not proceed with conversion as there are some "
                      "validation errors. Check log.")
            return
    log.info("Loading isajson {}".format(json_fp.name))
    isa = isajson.load(fp=json_fp)
    log.info("Exporting SRA to {}".format(path))
    log.debug("Using SRA settings {}".format(sra_settings))
    sra.export(isa, path, sra_settings=sra_settings,
               datafilehashes=datafilehashes)
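# Usage sketch (assumption, not part of the original module): one way
# convert() might be called. The input path and the sra_settings keys below
# are hypothetical placeholders chosen for illustration; adapt them to your
# own submission metadata.
def _example_convert_usage():
    example_sra_settings = {  # hypothetical settings keys for illustration
        "sra_center": "EXAMPLE-CENTER",
        "sra_broker": "EXAMPLE-BROKER",
        "sra_lab": "Example Lab",
        "sra_broker_contact_name": "Example Contact",
        "sra_broker_inform_on_status": "status@example.org",
        "sra_broker_inform_on_error": "errors@example.org",
    }
    # 'my_isa.json' is a placeholder path. convert() validates the ISA JSON
    # first (unless validate_first=False), then writes the SRA XML files
    # exercised by the tests below (e.g. experiment_set.xml, run_set.xml,
    # sample_set.xml, project_set.xml) into the output directory.
    with open('my_isa.json') as json_fp:
        convert(json_fp, './sra_out', sra_settings=example_sra_settings)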
def test_sra_paired_export_run_set_xml(self):
    sra.export(self._inv_obj, self._tmp_dir,
               sra_settings=self._sra_default_config)
    with open(os.path.join(self._tmp_dir, 'run_set.xml'), 'rb') as out_fp:
        actual_run_set_xml_obj = etree.fromstring(out_fp.read())
    self.assertTrue(
        utils.assert_xml_equal(self._expected_run_set_xml_obj,
                               actual_run_set_xml_obj))
def test_sra_export_sample_set_xml(self):
    sra.export(self._inv_obj, self._tmp_dir,
               sra_settings=self._sra_default_config)
    with open(os.path.join(self._tmp_dir, 'sample_set.xml'), 'rb') as out_fp:
        actual_sample_set_xml_obj = etree.fromstring(out_fp.read())
    self.assertTrue(
        utils.assert_xml_equal(self._expected_sample_set_xml_obj,
                               actual_sample_set_xml_obj))
def convert(json_fp, path, config_dir=None, sra_settings=None,
            datafilehashes=None, validate_first=True):
    """(New) Converter for ISA JSON to SRA.

    :param json_fp: File pointer to ISA JSON input
    :param path: Directory for output to be written
    :param config_dir: Path to JSON configuration. If None, uses the default
        embedded in the API
    :param sra_settings: SRA settings dict
    :param datafilehashes: Data files with hashes, in a dict
    :param validate_first: Whether to validate the ISA JSON before converting
    """
    if validate_first:
        log_msg_stream = isajson.validate(fp=json_fp, config_dir=config_dir,
                                          log_level=logging.WARNING)
        if '(E)' in log_msg_stream.getvalue():
            logger.fatal("Could not proceed with conversion as there are some "
                         "validation errors. Check log.")
            return
    i = isajson.load(fp=json_fp)
    sra.export(i, path, sra_settings=sra_settings,
               datafilehashes=datafilehashes)
def convert2(json_fp, path, config_dir=None, sra_settings=None,
             datafilehashes=None, validate_first=True):
    """(New) Converter for ISA JSON to SRA.

    :param json_fp: File pointer to ISA JSON input
    :param path: Directory for output to be written
    :param config_dir: Path to JSON configuration. If None, uses the default
        embedded in the API
    :param sra_settings: SRA settings dict
    :param datafilehashes: Data files with hashes, in a dict
    :param validate_first: Whether to validate the ISA JSON before converting
    """
    if validate_first:
        log_msg_stream = isajson.validate(fp=json_fp, config_dir=config_dir,
                                          log_level=logging.WARNING)
        if '(E)' in log_msg_stream.getvalue():
            logger.fatal("Could not proceed with conversion as there are some "
                         "validation errors. Check log.")
            return
    i = isajson.load(fp=json_fp)
    sra.export(i, path, sra_settings=sra_settings,
               datafilehashes=datafilehashes)
def test_sra_export_project_set_xml(self):
    sra.export(self._inv_obj, self._tmp_dir,
               sra_settings=self._sra_default_config)
    with open(os.path.join(self._tmp_dir, 'project_set.xml'), 'rb') as out_fp:
        actual_project_set_xml_obj = etree.fromstring(out_fp.read())
    self.assertTrue(
        utils.assert_xml_equal(self._expected_project_set_xml_obj,
                               actual_project_set_xml_obj))