def test_write_read_manifest_with_manifest(self):
    manifest_filename = os.path.join(self.temp_dir, 'test.xml')
    contents = [
        data_model.CombineArchiveContent(
            location='1.txt',
            format='http://purl.org/NET/mediatypes/plain/text',
            master=False,
        ),
        data_model.CombineArchiveContent(
            location='2.jpg',
            format='http://purl.org/NET/mediatypes/image/jpeg',
            master=True,
        ),
        data_model.CombineArchiveContent(
            location='manifest.xml',
            format=data_model.CombineArchiveContentFormat.OMEX_MANIFEST,
            master=False,
        ),
    ]

    io.CombineArchiveWriter().write_manifest(contents, manifest_filename)
    contents_2 = io.CombineArchiveReader().read_manifest(manifest_filename)

    archive = data_model.CombineArchive(contents=contents)
    archive_2 = data_model.CombineArchive(contents=contents_2)
    self.assertTrue(archive_2.is_equal(archive))
def _build_combine_archive(self, algorithm=None):
    doc = self._build_sed_doc(algorithm=algorithm)

    archive_dirname = os.path.join(self.dirname, 'archive')
    if not os.path.isdir(archive_dirname):
        os.mkdir(archive_dirname)

    model_filename = os.path.join(archive_dirname, 'model_1.bngl')
    shutil.copyfile(
        os.path.join(os.path.dirname(__file__), 'fixtures', 'test.bngl'),
        model_filename)

    sim_filename = os.path.join(archive_dirname, 'sim_1.sedml')
    SedmlSimulationWriter().run(doc, sim_filename)

    archive = combine_data_model.CombineArchive(
        contents=[
            combine_data_model.CombineArchiveContent(
                'model_1.bngl',
                combine_data_model.CombineArchiveContentFormat.BNGL.value),
            combine_data_model.CombineArchiveContent(
                'sim_1.sedml',
                combine_data_model.CombineArchiveContentFormat.SED_ML.value),
        ],
    )
    archive_filename = os.path.join(
        self.dirname,
        'archive.omex' if algorithm is None else 'archive-{}.omex'.format(algorithm.kisao_id))
    CombineArchiveWriter().run(archive, archive_dirname, archive_filename)

    return (doc, archive_filename)
def test_read_from_plain_zip_archive(self):
    in_dir = os.path.join(self.temp_dir, 'in')
    os.mkdir(in_dir)
    sim_path = os.path.join(in_dir, 'simulation.sedml')
    model_path = os.path.join(in_dir, 'model.xml')
    archive_filename = os.path.join(in_dir, 'archive.zip')
    with open(sim_path, 'w'):
        pass
    with open(model_path, 'w'):
        pass

    archive = Archive(files=[
        ArchiveFile(local_path=sim_path, archive_path='simulation.sedml'),
        ArchiveFile(local_path=model_path, archive_path='model.xml'),
    ])
    ArchiveWriter().run(archive, archive_filename)

    zip_out_dir = os.path.join(self.temp_dir, 'out_zip')
    with self.assertRaisesRegex(ValueError, 'not a valid COMBINE/OMEX archive'):
        io.CombineArchiveReader().run(archive_filename, zip_out_dir)

    config = Config(VALIDATE_OMEX_MANIFESTS=False)
    archive = io.CombineArchiveReader().run(archive_filename, zip_out_dir, config=config)

    combine_archive = io.CombineArchiveZipReader().run(archive_filename, zip_out_dir)
    expected_combine_archive = data_model.CombineArchive(contents=[
        data_model.CombineArchiveContent(
            location='simulation.sedml',
            format=data_model.CombineArchiveContentFormat.SED_ML.value),
        data_model.CombineArchiveContent(location='model.xml'),
    ])
    self.assertTrue(combine_archive.is_equal(expected_combine_archive))

    combine_out_dir = os.path.join(self.temp_dir, 'out_combine')
    config = Config(VALIDATE_OMEX_MANIFESTS=False)
    combine_archive = io.CombineArchiveReader().run(archive_filename, combine_out_dir, config=config)
    self.assertTrue(combine_archive.is_equal(expected_combine_archive))

    # error handling
    with self.assertRaisesRegex(ValueError, 'not a valid zip archive'):
        io.CombineArchiveZipReader().run(sim_path, zip_out_dir)

    config = Config(VALIDATE_OMEX_MANIFESTS=False)
    with self.assertRaisesRegex(ValueError, 'not a valid COMBINE/OMEX archive'):
        io.CombineArchiveReader().run(sim_path, zip_out_dir, config=config)

    config = Config(VALIDATE_OMEX_MANIFESTS=True)
    with self.assertRaisesRegex(ValueError, 'not a valid COMBINE/OMEX archive'):
        io.CombineArchiveReader().run(sim_path, zip_out_dir, config=config)
def test_no_updated_date(self):
    format = 'https://spec-url-for-format'
    content = data_model.CombineArchiveContent('1.txt', format, False)
    archive = data_model.CombineArchive([content])

    archive_file = os.path.join(self.temp_dir, 'test.omex')
    in_dir = os.path.join(self.temp_dir, 'in')
    out_dir = os.path.join(self.temp_dir, 'out')
    os.mkdir(in_dir)
    os.mkdir(out_dir)

    with open(os.path.join(in_dir, content.location), 'w') as file:
        file.write('a')

    io.CombineArchiveWriter().run(archive, in_dir, archive_file)
    archive_b = io.CombineArchiveReader().run(archive_file, out_dir)
    archive_b.contents = list(filter(
        lambda content: content.format != data_model.CombineArchiveContentFormat.OMEX_METADATA,
        archive_b.contents))
    self.assertTrue(archive.is_equal(archive_b))

    self.assertEqual(sorted(os.listdir(out_dir)), sorted([
        content.location,
        'manifest.xml',
    ]))
    with open(os.path.join(out_dir, content.location), 'r') as file:
        self.assertEqual('a', file.read())
def test_get_sedml_contents(self):
    archive = data_model.CombineArchive(contents=[
        data_model.CombineArchiveContent(
            location='file_1', format=data_model.CombineArchiveContentFormat.SED_ML, master=True),
        data_model.CombineArchiveContent(
            location='file_2', format=data_model.CombineArchiveContentFormat.SED_ML, master=False),
        data_model.CombineArchiveContent(
            location='file_3', format=data_model.CombineArchiveContentFormat.SBML, master=False),
        data_model.CombineArchiveContent(
            location='file_4', format=data_model.CombineArchiveContentFormat.BNGL, master=False),
    ])

    self.assertEqual(utils.get_sedml_contents(archive), archive.contents[0:1])
    self.assertEqual(utils.get_sedml_contents(archive, always_include_all_sed_docs=True),
                     archive.contents[0:2])

    archive.contents[0].master = False
    self.assertEqual(utils.get_sedml_contents(archive), archive.contents[0:2])
    self.assertEqual(utils.get_sedml_contents(archive, always_include_all_sed_docs=True),
                     archive.contents[0:2])

    archive.contents[2].master = True
    self.assertEqual(utils.get_sedml_contents(archive), archive.contents[0:2])
    self.assertEqual(
        utils.get_sedml_contents(archive, include_all_sed_docs_when_no_sed_doc_is_master=False), [])
def test_get_summary_sedml_contents(self):
    archive = data_model.CombineArchive(contents=[
        data_model.CombineArchiveContent(
            location='./exp_2.sedml', format=data_model.CombineArchiveContentFormat.SED_ML, master=False),
        data_model.CombineArchiveContent(
            location='./exp_1.sedml', format=data_model.CombineArchiveContentFormat.SED_ML, master=False),
    ])

    exp_1 = sedml_data_model.SedDocument()
    model_1 = sedml_data_model.Model(
        id='model_1', language=sedml_data_model.ModelLanguage.SBML.value, source='./model.xml')
    exp_1.models.append(model_1)
    sim_1 = sedml_data_model.SteadyStateSimulation(
        id='sim_1', algorithm=sedml_data_model.Algorithm(kisao_id='KISAO_0000019'))
    exp_1.simulations.append(sim_1)
    task_1 = sedml_data_model.Task(id='task_1', model=model_1, simulation=sim_1)
    task_2 = sedml_data_model.Task(id='task_2', model=model_1, simulation=sim_1)
    exp_1.tasks.append(task_1)
    exp_1.tasks.append(task_2)
    exp_1.outputs.append(sedml_data_model.Report(id='report_1'))
    exp_1.outputs.append(sedml_data_model.Plot2D(id='plot_2'))
    SedmlSimulationWriter().run(
        exp_1, os.path.join(self.dirname, 'exp_1.sedml'),
        validate_semantics=False, validate_models_with_languages=False)

    exp_2 = sedml_data_model.SedDocument()
    model_2 = sedml_data_model.Model(
        id='model_2', language=sedml_data_model.ModelLanguage.SBML.value, source='./model.xml')
    exp_2.models.append(model_2)
    sim_2 = sedml_data_model.SteadyStateSimulation(
        id='sim_2', algorithm=sedml_data_model.Algorithm(kisao_id='KISAO_0000019'))
    exp_2.simulations.append(sim_2)
    task_3 = sedml_data_model.Task(id='task_3', model=model_2, simulation=sim_2)
    exp_2.tasks.append(task_3)
    exp_2.outputs.append(sedml_data_model.Report(id='report_3'))
    exp_2.outputs.append(sedml_data_model.Plot3D(id='plot_5'))
    exp_2.outputs.append(sedml_data_model.Plot2D(id='plot_4'))
    SedmlSimulationWriter().run(
        exp_2, os.path.join(self.dirname, 'exp_2.sedml'),
        validate_semantics=False, validate_models_with_languages=False)

    with mock.patch('biosimulators_utils.sedml.validation.validate_output', return_value=([], [])):
        summary = utils.get_summary_sedml_contents(archive, self.dirname)
    self.assertTrue(summary.startswith(
        'Archive contains 2 SED-ML documents with 2 models, 2 simulations, 3 tasks, 2 reports, and 3 plots:\n'))
    self.assertGreater(summary.index('exp_2.sedml'), summary.index('exp_1.sedml'))
    self.assertGreater(summary.index('plot_5'), summary.index('plot_4'))
def test_content(self):
    location = 'path_to_file'
    format = 'https://spec-url-for-format'

    content = data_model.CombineArchiveContent(location, format)
    self.assertEqual(content.location, location)
    self.assertEqual(content.format, format)
    self.assertEqual(content.master, False)

    content2 = data_model.CombineArchiveContent(location, format, True)
    self.assertEqual(content2.to_tuple(), (location, format, True))

    content3 = data_model.CombineArchiveContent(location, format, True)
    self.assertTrue(content.is_equal(content))
    self.assertFalse(content.is_equal(content2))
    self.assertTrue(content2.is_equal(content2))
    self.assertTrue(content2.is_equal(content3))
def test_archive(self):
    location = 'path_to_file'
    format = 'https://spec-url-for-format'
    content1 = data_model.CombineArchiveContent(location, format, False)
    content2 = data_model.CombineArchiveContent(location, format, True)

    archive = data_model.CombineArchive()
    self.assertEqual(archive.contents, [])

    archive = data_model.CombineArchive([content1, content2])
    self.assertEqual(archive.contents, [content1, content2])
    self.assertEqual(archive.to_tuple(), (content1.to_tuple(), content2.to_tuple()))

    archive2 = data_model.CombineArchive([content2, content1])
    archive3 = data_model.CombineArchive([content1, content1])
    self.assertTrue(archive.is_equal(archive2))
    self.assertFalse(archive.is_equal(archive3))

    archive4 = data_model.CombineArchive([content2, content2])
    self.assertEqual(archive.get_master_content(), [content2])
    self.assertEqual(archive3.get_master_content(), [])
    self.assertEqual(archive4.get_master_content(), [content2, content2])
def test_init_combine_archive_log(self):
    archive = combine_data_model.CombineArchive(contents=[
        combine_data_model.CombineArchiveContent(
            location='./exp_2.sedml',
            format=combine_data_model.CombineArchiveContentFormat.SED_ML,
            master=False),
        combine_data_model.CombineArchiveContent(
            location='./exp_1.sedml',
            format=combine_data_model.CombineArchiveContentFormat.SED_ML,
            master=False),
    ])

    exp_1 = sedml_data_model.SedDocument()
    model_1 = sedml_data_model.Model(
        id='model_1', language=sedml_data_model.ModelLanguage.SBML.value, source='./model.xml')
    exp_1.models.append(model_1)
    sim_1 = sedml_data_model.SteadyStateSimulation(
        id='sim_1', algorithm=sedml_data_model.Algorithm(kisao_id='KISAO_0000019'))
    exp_1.simulations.append(sim_1)
    task_1 = sedml_data_model.Task(id='task_1', model=model_1, simulation=sim_1)
    task_2 = sedml_data_model.Task(id='task_2', model=model_1, simulation=sim_1)
    exp_1.tasks.append(task_1)
    exp_1.tasks.append(task_2)
    data_gen_1 = sedml_data_model.DataGenerator(
        id='data_gen_1', math='param_1',
        parameters=[sedml_data_model.Parameter(id='param_1', value=1.)])
    data_gen_2 = sedml_data_model.DataGenerator(
        id='data_gen_2', math='param_2',
        parameters=[sedml_data_model.Parameter(id='param_2', value=2.)])
    exp_1.data_generators.append(data_gen_1)
    exp_1.data_generators.append(data_gen_2)
    exp_1.outputs.append(sedml_data_model.Report(
        id='report_1',
        data_sets=[
            sedml_data_model.DataSet(id='data_set_1', label='data_set_1', data_generator=data_gen_1),
            sedml_data_model.DataSet(id='data_set_2', label='data_set_2', data_generator=data_gen_2),
        ]))
    exp_1.outputs.append(sedml_data_model.Plot2D(
        id='plot_2',
        curves=[
            sedml_data_model.Curve(
                id='curve_1', x_data_generator=data_gen_1, y_data_generator=data_gen_1,
                x_scale=sedml_data_model.AxisScale.log, y_scale=sedml_data_model.AxisScale.log),
            sedml_data_model.Curve(
                id='curve_2', x_data_generator=data_gen_2, y_data_generator=data_gen_2,
                x_scale=sedml_data_model.AxisScale.log, y_scale=sedml_data_model.AxisScale.log),
        ]))
    SedmlSimulationWriter().run(exp_1, os.path.join(self.dirname, 'exp_1.sedml'),
                                validate_models_with_languages=False)

    exp_2 = sedml_data_model.SedDocument()
    model_2 = sedml_data_model.Model(
        id='model_2', language=sedml_data_model.ModelLanguage.SBML.value, source='./model.xml')
    exp_2.models.append(model_2)
    sim_2 = sedml_data_model.SteadyStateSimulation(
        id='sim_2', algorithm=sedml_data_model.Algorithm(kisao_id='KISAO_0000019'))
    exp_2.simulations.append(sim_2)
    task_3 = sedml_data_model.Task(id='task_3', model=model_2, simulation=sim_2)
    exp_2.tasks.append(task_3)
    data_gen_3 = sedml_data_model.DataGenerator(
        id='data_gen_3', math='param_3',
        parameters=[sedml_data_model.Parameter(id='param_3', value=1.)])
    data_gen_4 = sedml_data_model.DataGenerator(
        id='data_gen_4', math='param_4',
        parameters=[sedml_data_model.Parameter(id='param_4', value=2.)])
    exp_2.data_generators.append(data_gen_3)
    exp_2.data_generators.append(data_gen_4)
    exp_2.outputs.append(sedml_data_model.Report(id='report_3'))
    exp_2.outputs.append(sedml_data_model.Plot2D(id='plot_4'))
    exp_2.outputs.append(sedml_data_model.Plot3D(
        id='plot_5',
        surfaces=[
            sedml_data_model.Surface(
                id='surface_1',
                x_data_generator=data_gen_3, y_data_generator=data_gen_3, z_data_generator=data_gen_4,
                x_scale=sedml_data_model.AxisScale.log, y_scale=sedml_data_model.AxisScale.log,
                z_scale=sedml_data_model.AxisScale.log),
        ]))
    SedmlSimulationWriter().run(exp_2, os.path.join(self.dirname, 'exp_2.sedml'),
                                validate_semantics=False, validate_models_with_languages=False)

    # log all features; only documents, tasks, reports, and data sets are supported
    status = utils.init_combine_archive_log(
        archive, self.dirname,
        logged_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.Plot2D,
            sedml_data_model.Plot3D,
            sedml_data_model.DataSet,
            sedml_data_model.Curve,
            sedml_data_model.Surface,
        ),
        supported_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.DataSet,
        ),
    )
    expected = {
        'status': 'QUEUED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': [
            {
                'location': 'exp_2.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_3', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_3', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'dataSets': []},
                    {'id': 'plot_4', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'curves': []},
                    {'id': 'plot_5', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'surfaces': [
                         {'id': 'surface_1', 'status': 'SKIPPED'},
                     ]},
                ],
            },
            {
                'location': 'exp_1.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_1', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                    {'id': 'task_2', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_1', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'dataSets': [
                         {'id': 'data_set_1', 'status': 'QUEUED'},
                         {'id': 'data_set_2', 'status': 'QUEUED'},
                     ]},
                    {'id': 'plot_2', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'curves': [
                         {'id': 'curve_1', 'status': 'SKIPPED'},
                         {'id': 'curve_2', 'status': 'SKIPPED'},
                     ]},
                ],
            },
        ],
    }
    self.assertEqual(status.to_json()['sedDocuments'][1]['outputs'][1],
                     expected['sedDocuments'][1]['outputs'][1])
    self.assertEqual(status.sed_documents['exp_1.sedml'].parent, status)
    self.assertEqual(status.sed_documents['exp_1.sedml'].tasks['task_1'].parent,
                     status.sed_documents['exp_1.sedml'])
    self.assertEqual(status.sed_documents['exp_1.sedml'].outputs['report_1'].parent,
                     status.sed_documents['exp_1.sedml'])

    # elements left QUEUED are finalized as SKIPPED
    status = utils.init_combine_archive_log(archive, self.dirname)
    for doc in status.sed_documents.values():
        doc.status = data_model.Status.QUEUED
        for task in doc.tasks.values():
            task.status = data_model.Status.QUEUED
        for output in doc.outputs.values():
            output.status = data_model.Status.QUEUED
            if isinstance(output, data_model.ReportLog):
                els = output.data_sets
            elif isinstance(output, data_model.Plot2DLog):
                els = output.curves
            elif isinstance(output, data_model.Plot3DLog):
                els = output.surfaces
            else:
                raise ValueError(output.__class__)
            for id in els.keys():
                els[id] = data_model.Status.QUEUED
    status.finalize()
    self.assertEqual(status.to_json(), {
        'status': 'SKIPPED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': [
            {
                'location': 'exp_2.sedml', 'status': 'SKIPPED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_3', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_3', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'dataSets': []},
                    {'id': 'plot_4', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'curves': []},
                    {'id': 'plot_5', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'surfaces': [
                         {'id': 'surface_1', 'status': 'SKIPPED'},
                     ]},
                ],
            },
            {
                'location': 'exp_1.sedml', 'status': 'SKIPPED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_1', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                    {'id': 'task_2', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_1', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'dataSets': [
                         {'id': 'data_set_1', 'status': 'SKIPPED'},
                         {'id': 'data_set_2', 'status': 'SKIPPED'},
                     ]},
                    {'id': 'plot_2', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'curves': [
                         {'id': 'curve_1', 'status': 'SKIPPED'},
                         {'id': 'curve_2', 'status': 'SKIPPED'},
                     ]},
                ],
            },
        ],
    })

    # elements still RUNNING at finalization are marked FAILED
    status = utils.init_combine_archive_log(archive, self.dirname)
    status.status = data_model.Status.RUNNING
    for doc in status.sed_documents.values():
        doc.status = data_model.Status.RUNNING
        for task in doc.tasks.values():
            task.status = data_model.Status.RUNNING
        for output in doc.outputs.values():
            output.status = data_model.Status.RUNNING
            if isinstance(output, data_model.ReportLog):
                els = output.data_sets
            elif isinstance(output, data_model.Plot2DLog):
                els = output.curves
            elif isinstance(output, data_model.Plot3DLog):
                els = output.surfaces
            else:
                raise ValueError(output.__class__)
            for id in els.keys():
                els[id] = data_model.Status.RUNNING
    status.finalize()
    self.assertEqual(status.to_json(), {
        'status': 'FAILED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': [
            {
                'location': 'exp_2.sedml', 'status': 'FAILED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_3', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_3', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'dataSets': []},
                    {'id': 'plot_4', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'curves': []},
                    {'id': 'plot_5', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'surfaces': [
                         {'id': 'surface_1', 'status': 'FAILED'},
                     ]},
                ],
            },
            {
                'location': 'exp_1.sedml', 'status': 'FAILED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_1', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                    {'id': 'task_2', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_1', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'dataSets': [
                         {'id': 'data_set_1', 'status': 'FAILED'},
                         {'id': 'data_set_2', 'status': 'FAILED'},
                     ]},
                    {'id': 'plot_2', 'status': 'FAILED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None,
                     'curves': [
                         {'id': 'curve_1', 'status': 'FAILED'},
                         {'id': 'curve_2', 'status': 'FAILED'},
                     ]},
                ],
            },
        ],
    })

    # test logging subsets of possible features -- no data sets, curves, surfaces
    status = utils.init_combine_archive_log(
        archive, self.dirname,
        logged_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.Plot2D,
            sedml_data_model.Plot3D,
        ),
        supported_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.DataSet,
        ),
    )
    self.assertEqual(status.to_json(), {
        'status': 'QUEUED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': [
            {
                'location': 'exp_2.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_3', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_3', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'dataSets': None},
                    {'id': 'plot_4', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'curves': None},
                    {'id': 'plot_5', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'surfaces': None},
                ],
            },
            {
                'location': 'exp_1.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_1', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                    {'id': 'task_2', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_1', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'dataSets': None},
                    {'id': 'plot_2', 'status': 'SKIPPED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'curves': None},
                ],
            },
        ],
    })

    # test logging subsets of possible features -- no plots
    status = utils.init_combine_archive_log(
        archive, self.dirname,
        logged_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
        ),
        supported_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.DataSet,
        ),
    )
    self.assertEqual(status.to_json(), {
        'status': 'QUEUED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': [
            {
                'location': 'exp_2.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_3', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_3', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'dataSets': None},
                ],
            },
            {
                'location': 'exp_1.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_1', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                    {'id': 'task_2', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': [
                    {'id': 'report_1', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'dataSets': None},
                ],
            },
        ],
    })

    # test logging subsets of possible features -- no outputs
    status = utils.init_combine_archive_log(
        archive, self.dirname,
        logged_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
        ),
        supported_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.DataSet,
        ),
    )
    self.assertEqual(status.to_json(), {
        'status': 'QUEUED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': [
            {
                'location': 'exp_2.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_3', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': None,
            },
            {
                'location': 'exp_1.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': [
                    {'id': 'task_1', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                    {'id': 'task_2', 'status': 'QUEUED', 'exception': None, 'skipReason': None,
                     'output': None, 'duration': None, 'algorithm': None, 'simulatorDetails': None},
                ],
                'outputs': None,
            },
        ],
    })

    # test logging subsets of possible features -- no tasks or outputs
    status = utils.init_combine_archive_log(
        archive, self.dirname,
        logged_features=(sedml_data_model.SedDocument, ),
        supported_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.DataSet,
        ),
    )
    self.assertEqual(status.to_json(), {
        'status': 'QUEUED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': [
            {
                'location': 'exp_2.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': None, 'outputs': None,
            },
            {
                'location': 'exp_1.sedml', 'status': 'QUEUED',
                'exception': None, 'skipReason': None, 'output': None, 'duration': None,
                'tasks': None, 'outputs': None,
            },
        ],
    })

    # test logging subsets of possible features -- no SED documents
    status = utils.init_combine_archive_log(
        archive, self.dirname,
        logged_features=(),
        supported_features=(
            sedml_data_model.SedDocument,
            sedml_data_model.Task,
            sedml_data_model.Report,
            sedml_data_model.DataSet,
        ),
    )
    self.assertEqual(status.to_json(), {
        'status': 'QUEUED', 'exception': None, 'skipReason': None, 'output': None, 'duration': None,
        'sedDocuments': None,
    })
def test(self):
    format = 'https://spec-url-for-format'
    content1 = data_model.CombineArchiveContent('1.txt', format, False)
    content2 = data_model.CombineArchiveContent('2/2.txt', format, True)
    archive1 = data_model.CombineArchive([content1, content2])
    archive2 = data_model.CombineArchive([content1, content1])

    archive_file = os.path.join(self.temp_dir, 'test.omex')
    in_dir = os.path.join(self.temp_dir, 'in')
    out_dir = os.path.join(self.temp_dir, 'out')
    out_dir2 = os.path.join(self.temp_dir, 'out2')
    os.mkdir(in_dir)
    os.mkdir(os.path.join(in_dir, '2'))
    os.mkdir(out_dir)

    with open(os.path.join(in_dir, content1.location), 'w') as file:
        file.write('a')
    with open(os.path.join(in_dir, content2.location), 'w') as file:
        file.write('b')

    io.CombineArchiveWriter().run(archive1, in_dir, archive_file)

    archive1b = io.CombineArchiveReader().run(archive_file, out_dir, include_omex_metadata_files=False)
    self.assertTrue(archive1.is_equal(archive1b))

    archive1b = io.CombineArchiveReader().run(archive_file, out_dir, include_omex_metadata_files=True)
    metadata_contents = [
        content.location
        for content in archive1b.contents
        if re.match(data_model.CombineArchiveContentFormatPattern.OMEX_METADATA.value, content.format)
    ]
    self.assertEqual(metadata_contents, [])

    self.assertEqual(sorted(os.listdir(out_dir)), sorted([
        content1.location,
        os.path.dirname(content2.location),
        'manifest.xml',
    ]))
    with open(os.path.join(out_dir, content1.location), 'r') as file:
        self.assertEqual('a', file.read())
    with open(os.path.join(out_dir, content2.location), 'r') as file:
        self.assertEqual('b', file.read())

    io.CombineArchiveWriter().run(archive2, in_dir, archive_file)
    archive2b = io.CombineArchiveReader().run(archive_file, out_dir2, include_omex_metadata_files=False)
    self.assertTrue(archive2.is_equal(archive2b))
    self.assertEqual(sorted(os.listdir(out_dir2)), sorted([
        content1.location,
        'manifest.xml',
    ]))
    with open(os.path.join(out_dir2, content1.location), 'r') as file:
        self.assertEqual('a', file.read())

    with self.assertRaisesRegex(ValueError, 'is not a file'):
        io.CombineArchiveReader().run(os.path.join(self.temp_dir, 'test2.omex'), out_dir)
def test_write_error_handling(self):
    now = datetime.datetime(2020, 1, 2, 1, 2, 3, tzinfo=dateutil.tz.tzutc())
    content = data_model.CombineArchiveContent('1.txt', 'plain/text', False)
    with open(os.path.join(self.temp_dir, content.location), 'w') as file:
        pass
    archive = data_model.CombineArchive([content])
    archive_file = os.path.join(self.temp_dir, 'archive.omex')

    with self.assertRaisesRegex(Exception, 'could not be saved'):
        with mock.patch.object(libcombine.CombineArchive, 'writeToFile', return_value=False):
            io.CombineArchiveWriter().run(archive, self.temp_dir, archive_file)

    with self.assertRaisesRegex(Exception, 'could not be added to the archive'):
        with mock.patch.object(libcombine.CombineArchive, 'addFile', return_value=False):
            io.CombineArchiveWriter().run(archive, self.temp_dir, archive_file)

    with self.assertRaisesRegex(ValueError, 'my error'):
        with mock.patch('biosimulators_utils.combine.io.get_combine_errors_warnings',
                        return_value=([['my error']], [])):
            io.CombineArchiveWriter().run(archive, self.temp_dir, archive_file)

    with self.assertWarnsRegex(BioSimulatorsWarning, 'my warning'):
        with mock.patch('biosimulators_utils.combine.io.get_combine_errors_warnings',
                        return_value=([], [['my warning']])):
            io.CombineArchiveWriter().run(archive, self.temp_dir, archive_file)

    archive_file = os.path.join(os.path.dirname(__file__), '..', 'fixtures',
                                'invalid-parent-format-in-manifest.omex')
    out_dir = os.path.join(self.temp_dir, 'out-1')
    with self.assertRaisesRegex(ValueError, 'format of the archive must be'):
        io.CombineArchiveReader().run(archive_file, out_dir, include_omex_metadata_files=False)

    archive_file = os.path.join(os.path.dirname(__file__), '..', 'fixtures',
                                'missing-parent-in-manifest.omex')
    out_dir = os.path.join(self.temp_dir, 'out-2')
    with self.assertWarnsRegex(BioSimulatorsWarning,
                               'Manifests should include their parent COMBINE/OMEX archives'):
        io.CombineArchiveReader().run(archive_file, out_dir, include_omex_metadata_files=False)