def test_read_omex_meta_file(self):
        triples, errors, warnings = io.read_omex_meta_file(
            self.FIXTURE,
            config=Config(
                OMEX_METADATA_SCHEMA=data_model.OmexMetadataSchema.rdf_triples,
            ))
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        metadata, errors, warnings = io.read_omex_meta_file(
            self.FIXTURE,
            config=Config(
                OMEX_METADATA_SCHEMA=data_model.OmexMetadataSchema.biosimulations,
            ),
            working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        metadata, errors, warnings = io.read_omex_meta_file(
            self.FIXTURE,
            config=Config(
                OMEX_METADATA_SCHEMA=None,
            ))
        self.assertEqual(metadata, None)
        self.assertIn('is not supported', flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])
    def test_read_from_plain_zip_archive(self):
        """A plain zip (no manifest) is only readable when manifest validation is off."""
        # Build a plain zip archive containing two empty files.
        in_dir = os.path.join(self.temp_dir, 'in')
        os.mkdir(in_dir)
        sim_path = os.path.join(in_dir, 'simulation.sedml')
        model_path = os.path.join(in_dir, 'model.xml')
        archive_filename = os.path.join(in_dir, 'archive.zip')
        for path in (sim_path, model_path):
            with open(path, 'w'):
                pass

        files = [
            ArchiveFile(local_path=sim_path, archive_path='simulation.sedml'),
            ArchiveFile(local_path=model_path, archive_path='model.xml'),
        ]
        ArchiveWriter().run(Archive(files=files), archive_filename)

        zip_out_dir = os.path.join(self.temp_dir, 'out_zip')

        # With the default (validating) configuration the zip is rejected.
        with self.assertRaisesRegex(ValueError,
                                    'not a valid COMBINE/OMEX archive'):
            io.CombineArchiveReader().run(archive_filename, zip_out_dir)

        # With manifest validation off, the reader falls back to the zip path.
        archive = io.CombineArchiveReader().run(
            archive_filename, zip_out_dir,
            config=Config(VALIDATE_OMEX_MANIFESTS=False))

        combine_archive = io.CombineArchiveZipReader().run(
            archive_filename, zip_out_dir)

        expected_combine_archive = data_model.CombineArchive(contents=[
            data_model.CombineArchiveContent(
                location='simulation.sedml',
                format=data_model.CombineArchiveContentFormat.SED_ML.value),
            data_model.CombineArchiveContent(location='model.xml'),
        ])
        self.assertTrue(combine_archive.is_equal(expected_combine_archive))

        combine_out_dir = os.path.join(self.temp_dir, 'out_combine')
        combine_archive = io.CombineArchiveReader().run(
            archive_filename, combine_out_dir,
            config=Config(VALIDATE_OMEX_MANIFESTS=False))
        self.assertTrue(combine_archive.is_equal(expected_combine_archive))

        # Error handling: a non-zip file fails in every mode.
        with self.assertRaisesRegex(ValueError, 'not a valid zip archive'):
            io.CombineArchiveZipReader().run(sim_path, zip_out_dir)

        for validate_manifests in (False, True):
            config = Config(VALIDATE_OMEX_MANIFESTS=validate_manifests)
            with self.assertRaisesRegex(ValueError,
                                        'not a valid COMBINE/OMEX archive'):
                io.CombineArchiveReader().run(sim_path, zip_out_dir,
                                              config=config)
    def test_TriplesOmexMetaWriter_run(self):
        """Triples written in several serializations re-read as the same metadata."""
        triples, errors, _ = io.TriplesOmexMetaReader().run(self.FIXTURE)
        self.assertEqual(errors, [])

        filename2 = os.path.join(self.dir_name, 'md2.rdf')
        filename3 = os.path.join(self.dir_name, 'md3.rdf')
        rdfxml_config = Config(
            OMEX_METADATA_OUTPUT_FORMAT=data_model.OmexMetadataOutputFormat.rdfxml)
        io.TriplesOmexMetaWriter().run(triples, filename2, config=rdfxml_config)
        io.TriplesOmexMetaWriter().run(triples, filename3)

        md, errors, _ = io.BiosimulationsOmexMetaReader().run(self.FIXTURE)
        self.assertEqual(errors, [])
        md2, errors, _ = io.BiosimulationsOmexMetaReader().run(filename2)
        self.assertEqual(errors, [])
        md3, errors, _ = io.BiosimulationsOmexMetaReader().run(filename3)
        self.assertEqual(errors, [])

        def sort_list_values(expected, actual):
            # List-valued attributes carry no guaranteed order; sort both
            # records' lists in place so they compare deterministically.
            for key in expected.keys():
                if isinstance(expected[key], list) and expected[key]:
                    if isinstance(expected[key][0], str):
                        actual[key].sort()
                        expected[key].sort()
                    elif (isinstance(expected[key][0], dict)
                          and 'uri' in expected[key][0]):
                        by_uri_label = lambda obj: (obj['uri'], obj['label'])
                        actual[key].sort(key=by_uri_label)
                        expected[key].sort(key=by_uri_label)

        sort_list_values(md[0], md2[0])
        for key in md[0].keys():
            self.assertEqual(md2[0][key], md[0][key], key)
        self.assertEqual(md2, md)
        self.assertEqual(md3, md)

        # Other serializations: turtle and abbreviated RDF/XML.
        filename = os.path.join(self.dir_name, 'md')
        io.TriplesOmexMetaWriter().run(
            triples, filename,
            config=Config(
                OMEX_METADATA_OUTPUT_FORMAT=data_model.OmexMetadataOutputFormat.turtle))

        filename = os.path.join(self.dir_name, 'md.xml')
        io.TriplesOmexMetaWriter().run(
            triples, filename,
            config=Config(
                OMEX_METADATA_OUTPUT_FORMAT=data_model.OmexMetadataOutputFormat.rdfxml_abbrev))

        md2, errors, _ = io.BiosimulationsOmexMetaReader().run(filename)
        self.assertEqual(errors, [])
        sort_list_values(md[0], md2[0])
        for key in md[0].keys():
            self.assertEqual(md2[0][key], md[0][key], key)
        self.assertEqual(md2, md)
Ejemplo n.º 4
0
    def validate_archive(self, filename):
        """Extract and validate one example COMBINE/OMEX archive.

        Validation warnings are re-raised as :obj:`BioSimulatorsWarning`;
        validation errors raise :obj:`ValueError`.
        """
        name = os.path.relpath(filename, EXAMPLES_DIR)
        temp_dirname = os.path.join(self.temp_dirname, name)
        if not os.path.isdir(temp_dirname):
            os.makedirs(temp_dirname)

        archive = CombineArchiveReader().run(filename, temp_dirname)

        error_msgs, warning_msgs = validate(
            archive,
            temp_dirname,
            formats_to_validate=list(
                CombineArchiveContentFormat.__members__.values()),
            config=Config(
                OMEX_METADATA_SCHEMA=OmexMetadataSchema.biosimulations, ),
        )

        if warning_msgs:
            flattened = flatten_nested_list_of_strings(warning_msgs)
            warnings.warn(
                'The COMBINE/OMEX archive may be invalid.\n  {}'.format(
                    flattened.replace('\n', '\n  ')),
                BioSimulatorsWarning)

        if error_msgs:
            flattened = flatten_nested_list_of_strings(error_msgs)
            raise ValueError(
                'The COMBINE/OMEX archive is not valid.\n  {}'.format(
                    flattened.replace('\n', '\n  ')))
    def test_validate_no_metadata(self):
        """An archive without metadata only errors when all formats are
        validated with metadata validation enabled."""
        os.remove(os.path.join(self.tmp_dir, 'thumbnail.png'))

        fixture = os.path.join(self.FIXTURES_DIR, 'no-metadata.omex')

        # (validate metadata?, validate all content formats?, expect errors?)
        cases = [
            (True, False, False),
            (False, False, False),
            (True, True, True),
            (False, True, False),
        ]
        for validate_metadata, validate_formats, expect_errors in cases:
            config = Config(VALIDATE_OMEX_METADATA=validate_metadata)
            archive = CombineArchiveReader().run(fixture,
                                                 self.tmp_dir,
                                                 config=config)
            kwargs = {}
            if validate_formats:
                kwargs['formats_to_validate'] = list(
                    CombineArchiveContentFormat.__members__.values())
            errors, warnings = validate(archive, self.tmp_dir,
                                        config=config, **kwargs)
            if expect_errors:
                self.assertNotEqual(errors, [])
            else:
                self.assertEqual(errors, [])
    def test_read_manifest_from_plain_zip(self):
        """Reading a plain zip and its (missing) manifest depends on whether
        manifest validation is enabled."""
        # Build a plain zip archive containing two empty files.
        in_dir = os.path.join(self.temp_dir, 'in')
        os.mkdir(in_dir)
        sim_path = os.path.join(in_dir, 'simulation.sedml')
        model_path = os.path.join(in_dir, 'model.xml')
        archive_filename = os.path.join(in_dir, 'archive.zip')
        for path in (sim_path, model_path):
            with open(path, 'w'):
                pass

        ArchiveWriter().run(
            Archive(files=[
                ArchiveFile(local_path=sim_path,
                            archive_path='simulation.sedml'),
                ArchiveFile(local_path=model_path,
                            archive_path='model.xml'),
            ]),
            archive_filename)

        zip_out_dir = os.path.join(self.temp_dir, 'out_zip')

        # Without manifest validation, the zip entries become the contents.
        archive = io.CombineArchiveReader().run(
            archive_filename, zip_out_dir,
            config=Config(VALIDATE_OMEX_MANIFESTS=False))
        self.assertEqual(len(archive.contents), 2)

        # With validation, a plain zip is rejected outright.
        with self.assertRaisesRegex(ValueError,
                                    'not a valid COMBINE/OMEX archive'):
            io.CombineArchiveReader().run(
                archive_filename, zip_out_dir,
                config=Config(VALIDATE_OMEX_MANIFESTS=True))

        manifest_filename = os.path.join(in_dir, 'manifest.xml')

        # read_manifest synthesizes contents when validation is off ...
        archive.contents = io.CombineArchiveReader().read_manifest(
            manifest_filename, archive_filename,
            config=Config(VALIDATE_OMEX_MANIFESTS=False))
        self.assertEqual(len(archive.contents), 3)

        # ... and returns nothing when validation is on.
        archive.contents = io.CombineArchiveReader().read_manifest(
            manifest_filename, archive_filename,
            config=Config(VALIDATE_OMEX_MANIFESTS=True))
        self.assertEqual(len(archive.contents), 0)
    def test_write_omex_meta_file(self):
        """Write OMEX metadata under each schema; an unset schema is unsupported."""
        # RDF-triples schema round trip.
        config = Config(
            OMEX_METADATA_SCHEMA=data_model.OmexMetadataSchema.rdf_triples,
        )
        triples, errors, _ = io.read_omex_meta_file(self.FIXTURE,
                                                    config=config)
        self.assertEqual(errors, [])
        out_path = os.path.join(self.dir_name, 'md.rdf')
        io.write_omex_meta_file(triples, out_path, config=config)

        # BioSimulations schema on the same config object.
        md, errors, _ = io.BiosimulationsOmexMetaReader().run(self.FIXTURE)
        self.assertEqual(errors, [])
        out_path = os.path.join(self.dir_name, 'md.rdf')
        config.OMEX_METADATA_SCHEMA = data_model.OmexMetadataSchema.biosimulations
        io.write_omex_meta_file(md, out_path, config=config)

        # Default configuration also works.
        io.write_omex_meta_file(md, out_path)

        # Clearing the schema makes writing unsupported.
        config.OMEX_METADATA_SCHEMA = None
        with self.assertRaises(NotImplementedError):
            io.write_omex_meta_file(triples, out_path, config=config)
    def test_get_algorithm_substitution_policy(self):
        """The policy comes from the environment; invalid values raise."""
        # Default policy when the environment variable is unset.
        self.assertEqual(utils.get_algorithm_substitution_policy(),
                         AlgorithmSubstitutionPolicy.SIMILAR_VARIABLES)

        # A valid environment override is honored.
        env = {'ALGORITHM_SUBSTITUTION_POLICY': 'ANY'}
        with mock.patch.dict(os.environ, env):
            self.assertEqual(utils.get_algorithm_substitution_policy(),
                             AlgorithmSubstitutionPolicy.ANY)

        # An unknown environment value raises.
        env = {'ALGORITHM_SUBSTITUTION_POLICY': 'UNDEFINED'}
        with mock.patch.dict(os.environ, env):
            with self.assertRaises(ValueError):
                utils.get_algorithm_substitution_policy()

        # A config with no policy set raises with a descriptive message.
        bad_config = Config(ALGORITHM_SUBSTITUTION_POLICY=None)
        with self.assertRaisesRegex(ValueError, 'is not a valid value'):
            utils.get_algorithm_substitution_policy(config=bad_config)
    def test_validate_omex_meta_file(self):
        """Valid fixtures parse cleanly; broken fixtures report parse errors."""
        config = Config(OMEX_METADATA_SCHEMA=OmexMetadataSchema.rdf_triples)

        def read(basename):
            # Read one fixture from the OMEX metadata fixtures directory.
            return read_omex_meta_file(
                os.path.join(self.OMEX_META_FIXTURES_DIR, basename),
                working_dir=self.tmp_dir,
                config=config)

        # Well-formed fixtures produce no errors or warnings.
        for basename in ('libcombine.rdf', 'biosimulations.rdf'):
            _, errors, warnings = read(basename)
            self.assertEqual(errors, [])
            self.assertEqual(warnings, [])

        # Unsupported OMEX metadata version is reported as an error.
        _, errors, warnings = read('warning.rdf')
        self.assertIn("Unsupported version '1.2'",
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # Invalid XML: four errors including a parser error.
        _, errors, warnings = read('invalid.rdf')
        self.assertEqual(len(errors), 4)
        self.assertIn("XML parser error",
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # Malformed XML: three errors including a tag mismatch.
        _, errors, warnings = read('malformed.rdf')
        self.assertEqual(len(errors), 3)
        self.assertIn("Opening and ending tag mismatch",
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])
    def test_validate_biosimulations_metadata_for_uri(self):
        """Combinations of partial metadata files yield the expected records
        and errors."""
        config = Config(
            OMEX_METADATA_SCHEMA=data_model.OmexMetadataSchema.biosimulations)

        def read(fixtures):
            # Read one or more metadata thirds under the BioSimulations schema.
            return read_omex_meta_file(fixtures, config=config)

        # All three thirds together: two records, no errors.
        md, errors, _ = read(
            [self.FIXTURE_THIRD_A, self.FIXTURE_THIRD_B, self.FIXTURE_THIRD_C])
        self.assertEqual(errors, [])
        self.assertEqual(len(md), 2)

        # A + B: one record, no errors.
        md, errors, _ = read([self.FIXTURE_THIRD_A, self.FIXTURE_THIRD_B])
        self.assertEqual(errors, [])
        self.assertEqual(len(md), 1)

        # A + C: two records, no errors.
        md, errors, _ = read([self.FIXTURE_THIRD_A, self.FIXTURE_THIRD_C])
        self.assertEqual(errors, [])
        self.assertEqual(len(md), 2)

        # B + C: a required attribute (supplied only by A) is missing.
        md, errors, _ = read([self.FIXTURE_THIRD_B, self.FIXTURE_THIRD_C])
        self.assertIn('is required', flatten_nested_list_of_strings(errors))
        self.assertEqual(len(md), 2)

        # A alone: one record, no errors.
        md, errors, _ = read([self.FIXTURE_THIRD_A])
        self.assertEqual(errors, [])
        self.assertEqual(len(md), 1)

        # B alone: required information is missing.
        md, errors, _ = read([self.FIXTURE_THIRD_B])
        self.assertIn('is required', flatten_nested_list_of_strings(errors))
        self.assertEqual(len(md), 1)

        # C alone: contains no usable information.
        md, errors, _ = read([self.FIXTURE_THIRD_C])
        self.assertIn('does not contain information',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(len(md), 1)
    def test_BiosimulationsOmexMetaWriter_run(self):
        """Metadata written by ``BiosimulationsOmexMetaWriter`` re-reads as
        equivalent metadata, in both explicit RDF/XML and default formats."""
        md, errors, _ = io.BiosimulationsOmexMetaReader().run(self.FIXTURE)
        self.assertEqual(errors, [])

        filename2 = os.path.join(self.dir_name, 'md2.rdf')
        filename3 = os.path.join(self.dir_name, 'md3.rdf')
        io.BiosimulationsOmexMetaWriter().run(md, filename2,
                                              config=Config(OMEX_METADATA_OUTPUT_FORMAT=data_model.OmexMetadataOutputFormat.rdfxml)
                                              )
        io.BiosimulationsOmexMetaWriter().run(md, filename3)

        md2, errors, _ = io.BiosimulationsOmexMetaReader().run(filename2)
        # Bug fix: this check was missing — `errors` from reading filename2
        # was previously overwritten by the read of filename3 before being
        # asserted, so a failing round trip through filename2 went unnoticed.
        self.assertEqual(errors, [])
        md3, errors, _ = io.BiosimulationsOmexMetaReader().run(filename3)
        self.assertEqual(errors, [])

        # List-valued attributes have no guaranteed order; sort both sides
        # before comparing.
        for key in md[0].keys():
            if isinstance(md[0][key], list) and md[0][key]:
                if isinstance(md[0][key][0], str):
                    md2[0][key].sort()
                    md[0][key].sort()
                elif isinstance(md[0][key][0], dict) and 'uri' in md[0][key][0]:
                    md2[0][key].sort(key=lambda obj: (obj['uri'], obj['label']))
                    md[0][key].sort(key=lambda obj: (obj['uri'], obj['label']))
            self.assertEqual(md2[0][key], md[0][key], key)
        self.assertEqual(md2, md)
        self.assertEqual(md3, md)

        # Writing still succeeds with a cleared title and an extra `other`
        # annotation.
        filename = os.path.join(self.dir_name, 'md.rdf')
        md[0]['title'] = None
        md[0]['other'].append({
            'attribute': {
                'uri': 'http://www.collex.org/schema#thumbnail',
                'label': 'Image',
            },
            'value': {
                'uri': 'https://website.com/image.png',
                'label': 'Big image',
            },
        })
        io.BiosimulationsOmexMetaWriter().run(md, filename,
                                              config=Config(OMEX_METADATA_OUTPUT_FORMAT=data_model.OmexMetadataOutputFormat.rdfxml))

        # Round trip a fixture that includes per-file annotations.
        md, errors, _ = io.BiosimulationsOmexMetaReader().run(
            os.path.join(self.FIXTURE_DIR, 'biosimulations-with-file-annotations.rdf'))
        self.assertEqual(errors, [])
        filename = os.path.join(self.dir_name, 'md.rdf')
        io.BiosimulationsOmexMetaWriter().run(md, filename,
                                              config=Config(OMEX_METADATA_OUTPUT_FORMAT=data_model.OmexMetadataOutputFormat.rdfxml))
        md2, errors, _ = io.BiosimulationsOmexMetaReader().run(filename)
        self.assertEqual(errors, [])

        # Compare per-file records in a canonical order.
        md.sort(key=lambda file: file['uri'])
        md2.sort(key=lambda file: file['uri'])
        for i in range(len(md)):
            for key in md[i].keys():
                if isinstance(md[i][key], list) and md[i][key]:
                    if isinstance(md[i][key][0], str):
                        md2[i][key].sort()
                        md[i][key].sort()
                    elif isinstance(md[i][key][0], dict) and 'uri' in md[i][key][0]:
                        md2[i][key].sort(key=lambda obj: (obj['uri'], obj['label']))
                        md[i][key].sort(key=lambda obj: (obj['uri'], obj['label']))
        self.assertEqual(md2, md)
Ejemplo n.º 12
0
def handler(body, file=None):
    ''' Validate a model

    Args:
        body (:obj:`dict`): dictionary in schema ``ValidateModelFileOrUrl`` with keys

            * ``url`` whose value has schema ``Url`` with the URL for a model file
            * ``language`` (:obj:`str`): language of the model

        file (:obj:`werkzeug.datastructures.FileStorage`): model file

    Raises:
        :obj:`BadRequestException`: if the language is not supported, if both
            or neither of ``file``/``url`` are provided, or if the model could
            not be downloaded

    Returns:
        ``ValidationReport``: information about the validity or
            lack thereof of the model
    '''
    # Resolve the requested language to its member of MODEL_LANGUAGES.
    # (The original relied on the leaked `for` loop variable; this makes the
    # lookup explicit.)
    model_language = next(
        (language for language in MODEL_LANGUAGES
         if body['language'] == language.name),
        None)
    if model_language is None:  # pragma: no cover: unreachable due to OpenAPI validation
        # Fixed typo: the message previously read "is not not supported".
        title = (
            'Model language `{}` is not supported. Model language must be one of {}'
        ).format(
            body['language'],
            ', '.join(language.name for language in MODEL_LANGUAGES))
        raise BadRequestException(
            title=title,
            exception=NotImplementedError(),
        )

    # Exactly one of the uploaded file or the URL must be provided.
    model_file = file
    model_url = body.get('url', None)
    if model_url and model_file:
        raise BadRequestException(
            title='Only one of `file` or `url` can be used at a time.',
            instance=ValueError(),
        )
    if not model_url and not model_file:
        raise BadRequestException(
            title='One of `file` or `url` must be used.',
            instance=ValueError(),
        )

    # create a temporary file to hold the model
    model_filename = get_temp_file()

    # get the model, either from the uploaded file or by downloading the URL
    if model_file:
        model_file.save(model_filename)

    else:
        try:
            # Bound the download so a stalled server cannot hang the request;
            # a timeout surfaces as a RequestException and is reported below.
            response = requests.get(model_url, timeout=30)
            response.raise_for_status()
        except requests.exceptions.RequestException as exception:
            title = 'Model could not be loaded from `{}`'.format(model_url)
            raise BadRequestException(
                title=title,
                instance=exception,
            )

        # save the downloaded model to the local temporary file
        # (renamed from `file` to avoid shadowing the function parameter)
        with open(model_filename, 'wb') as model_file_handle:
            model_file_handle.write(response.content)

    # validate the model; imported model files are intentionally not validated
    config = Config(VALIDATE_IMPORTED_MODEL_FILES=False, )
    errors, warnings, _ = validate_model_with_language(model_filename,
                                                       model_language,
                                                       config=config)
    return make_validation_report(errors, warnings, filenames=[model_filename])
    def test_validate_biosimulations_metadata_for_uri(self):
        """Exercise ``validate_biosimulations_metadata_for_uri`` across many
        fixture mutations.

        Reads the fixture metadata once, then repeatedly deep-copies it,
        mutates one attribute at a time, and checks that validation reports
        the expected error/warning messages.  The scenarios are order
        dependent: the thumbnail file copied into ``self.dir_name`` early on
        is relied upon by the later cases.
        """
        config = Config(
            OMEX_METADATA_SCHEMA=data_model.OmexMetadataSchema.biosimulations)
        md, _, _ = read_omex_meta_file(self.FIXTURE, config=config)
        # validate a single metadata record (the fixture's first entry)
        md = md[0]

        # without a working directory, thumbnails cannot be checked
        errors, warnings = validate_biosimulations_metadata_for_uri(md)
        self.assertEqual(errors, [])
        self.assertIn('thumbnails could not be validated',
                      flatten_nested_list_of_strings(warnings))

        # with a working directory but no thumbnail file on disk
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md, working_dir=self.dir_name)
        self.assertIn('is not a file', flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # copy a real PNG into place so the thumbnail validates
        shutil.copyfile(
            os.path.join(self.FIXTURES_DIR, 'images',
                         'PNG_transparency_demonstration_1.png'),
            os.path.join(self.dir_name, 'thumbnail.png'))
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md, working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        # thumbnail declared in the archive with an image format: valid
        shutil.copyfile(
            os.path.join(self.FIXTURES_DIR, 'images',
                         'PNG_transparency_demonstration_1.png'),
            os.path.join(self.dir_name, 'thumbnail.png'))
        archive = CombineArchive(contents=[
            CombineArchiveContent(
                location='thumbnail.png',
                format=CombineArchiveContentFormat.PNG.value,
            ),
        ])
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md,
            validate_minimal_metadata=True,
            archive=archive,
            working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        # a non-image format for the thumbnail content is an error
        archive.contents[0].format = CombineArchiveContentFormat.PDF.value
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md,
            validate_minimal_metadata=True,
            archive=archive,
            working_dir=self.dir_name)
        self.assertNotEqual(errors, [])
        self.assertEqual(warnings, [])

        # missing required title
        md2 = copy.deepcopy(md)
        md2['title'] = None
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is required', flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # creator URI that is not a valid URI at all
        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'xyz'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is not a valid URI',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # creation date that is not a valid date
        md2 = copy.deepcopy(md)
        md2['created'] = 'xyz'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is not a valid date',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # an added modification date that is not a valid date
        md2 = copy.deepcopy(md)
        md2['modified'].append('xyz')
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is not a valid date',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # Identifiers.org URIs in several accepted spellings: all valid
        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'http://identifiers.org/pubmed:1234'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'http://identifiers.org/pubmed/1234'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'http://identifiers.org/PubMed:1234'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'http://identifiers.org/ncbi/pubmed:1234'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'http://identifiers.org/NCBI/pubmed:1234'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])

        # an unknown Identifiers.org namespace prefix is an error
        md2 = copy.deepcopy(md)
        md2['creators'][0][
            'uri'] = 'http://identifiers.org/not-a-namespace:invalid'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is not a valid prefix',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # a known prefix with an identifier that fails its pattern
        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'http://identifiers.org/pubmed:invalid'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is not valid for',
                      flatten_nested_list_of_strings(errors))
        self.assertIn('not a valid Identifiers.org identifier',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # a doubled namespace segment is not a valid prefix
        md2 = copy.deepcopy(md)
        md2['creators'][0]['uri'] = 'http://identifiers.org/ncbi:pubmed:1234'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is not a valid prefix',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # a thumbnail path that does not exist on disk
        md2 = copy.deepcopy(md)
        md2['thumbnails'][0] = 'x'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('is not a file', flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # a thumbnail whose archive entry has a non-image format
        md2 = copy.deepcopy(md)
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2,
            validate_minimal_metadata=True,
            working_dir=self.dir_name,
            archive=CombineArchive(contents=[
                CombineArchiveContent(
                    location=os.path.relpath(md2['thumbnails'][0], '.'),
                    format=CombineArchiveContentFormat.TEXT.value,
                )
            ]))
        self.assertIn('must be one of the following',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # invalid created date
        md2 = copy.deepcopy(md)
        md2['created'] = 'x'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('not a valid date',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])

        # invalid modified date
        md2 = copy.deepcopy(md)
        md2['modified'][0] = 'x'
        errors, warnings = validate_biosimulations_metadata_for_uri(
            md2, validate_minimal_metadata=True, working_dir=self.dir_name)
        self.assertIn('not a valid date',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])
Ejemplo n.º 14
0
def exec_plot_output_sed_doc(omex_file_path, base_out_path):
    """Convert per-document CSV plot/report outputs of a COMBINE/OMEX archive
    into HDF5 reports via ``ReportWriter``.

    CSV files whose names contain the ``__plot__`` marker are written as
    pseudo-reports of type ``Plot2D`` (and the CSV is renamed to drop the
    marker); all other CSV files are written as ordinary ``Report`` outputs.

    Args:
        omex_file_path: path to the COMBINE/OMEX archive to read.
        base_out_path: directory containing a sub-directory of CSV files per
            SED-ML document; also receives the HDF5 output.

    NOTE(review): relies on a module-level ``tmp_dir`` as the archive
    extraction directory -- confirm it is defined and cleaned up by the caller.
    """
    # manifest validation is disabled; the archive only needs to be unpacked
    config = Config(VALIDATE_OMEX_MANIFESTS=False)
    archive = CombineArchiveReader().run(in_file=omex_file_path,
                                         out_dir=tmp_dir,
                                         config=config)

    # determine files to execute
    sedml_contents = get_sedml_contents(archive)

    # NOTE(review): report_results is never populated in this function;
    # it appears to be vestigial.
    report_results = ReportResults()
    for i_content, content in enumerate(sedml_contents):
        content_filename = os.path.join(tmp_dir, content.location)

        # each CSV in the document's output directory becomes one HDF5 report
        for report_filename in glob.glob(
                os.path.join(base_out_path, content.location, '*.csv')):
            if report_filename.find('__plot__') != -1:
                report_id = os.path.splitext(
                    os.path.basename(report_filename))[0]

                # read report from CSV file produced by tellurium
                # data_set_df = pd.read_csv(report_filename).transpose()

                # header=None keeps the metadata rows (id, label, name) as data
                data_set_df = pd.read_csv(report_filename, header=None).T

                # rows 0-2 of each column hold the data set's id, label, and name
                datasets = []
                for col in data_set_df.columns:
                    datasets.append(
                        DataSet(id=data_set_df.loc[0, col],
                                label=data_set_df.loc[1, col],
                                name=data_set_df.loc[2, col]))
                report = Report(id=report_id,
                                name=report_id,
                                data_sets=datasets)

                # re-index the columns by data set id and drop the metadata rows
                data_set_df.columns = data_set_df.iloc[0]
                data_set_df.drop(0, inplace=True)
                data_set_df.drop(1, inplace=True)
                data_set_df.drop(2, inplace=True)
                data_set_df.reset_index(inplace=True)
                data_set_df.drop('index', axis=1, inplace=True)

                # create pseudo-report for ReportWriter

                data_set_results = DataSetResults()

                for col in list(data_set_df.columns):
                    data_set_results[col] = data_set_df[col].to_numpy(
                        dtype='float64')

                # append to data structure of report results

                # save file in desired BioSimulators format(s)
                # strip the plot marker so the export keeps the plot's real id
                export_id = report_id.replace('__plot__', '')
                report.id = export_id
                rel_path = os.path.join(content.location, report.id)
                # drop a leading './' segment, if any, from the HDF5 path
                if len(rel_path.split("./")) > 1:
                    rel_path = rel_path.split("./")[1]
                # print("base: ", base_out_path, file=sys.stdout)
                # print("rel: ", rel_path, file=sys.stdout)
                ReportWriter().run(report,
                                   data_set_results,
                                   base_out_path,
                                   rel_path,
                                   format='h5',
                                   type=Plot2D)
                # rename the CSV so downstream consumers see the plain id
                os.rename(report_filename,
                          report_filename.replace('__plot__', ''))

            else:
                print("report   : ", report_filename, file=sys.stdout)
                report_id = os.path.splitext(
                    os.path.basename(report_filename))[0]
                data_set_df = pd.read_csv(report_filename, header=None).T

                # rows 0-1 of each column hold the data set's id and label;
                # plain reports carry no name row
                datasets = []
                for col in data_set_df.columns:
                    datasets.append(
                        DataSet(id=data_set_df.loc[0, col],
                                label=data_set_df.loc[1, col],
                                name=""))
                report = Report(id=report_id,
                                name=report_id,
                                data_sets=datasets)

                data_set_df.columns = data_set_df.iloc[0]  # use ids
                data_set_df.drop(0, inplace=True)
                data_set_df.drop(1, inplace=True)
                data_set_df.drop(2, inplace=True)
                data_set_df.reset_index(inplace=True)
                data_set_df.drop('index', axis=1, inplace=True)

                data_set_results = DataSetResults()
                for col in list(data_set_df.columns):
                    data_set_results[col] = data_set_df[col].to_numpy(
                        dtype='float64')

                rel_path = os.path.join(content.location, report.id)
                # drop a leading './' segment, if any, from the HDF5 path
                if len(rel_path.split("./")) > 1:
                    rel_path = rel_path.split("./")[1]
                ReportWriter().run(report,
                                   data_set_results,
                                   base_out_path,
                                   rel_path,
                                   format='h5',
                                   type=Report)
Ejemplo n.º 15
0
def gen_sedml_2d_3d(omex_file_path, base_out_path):
    """Rewrite the SED-ML documents of a COMBINE/OMEX archive, adding a
    pseudo-report (``__plot__<output id>``) for every 2D/3D plot output so
    that plot data can later be captured as ordinary reports.

    Args:
        omex_file_path: path to the COMBINE/OMEX archive to read.
        base_out_path: output directory; a ``temp`` sub-directory is created
            to extract the archive and receives the rewritten
            ``simulation_<name>.sedml`` files.
    """
    temp_path = os.path.join(base_out_path, "temp")
    if not os.path.exists(temp_path):
        os.mkdir(temp_path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)

    # defining archive
    config = Config(VALIDATE_OMEX_MANIFESTS=True)
    archive = CombineArchiveReader().run(in_file=omex_file_path,
                                         out_dir=temp_path,
                                         config=config)

    # determine files to execute
    sedml_contents = get_sedml_contents(archive)

    for content in sedml_contents:
        content_filename = os.path.join(temp_path, content.location)
        # derive a short name from the (possibly nested) archive location
        if '/' in content.location:
            sedml_name = content.location.split('/')[1].split('.')[0]
        else:
            sedml_name = content.location.split('.')[0]

        print("name: ", sedml_name, file=sys.stdout)

        doc = SedmlSimulationReader().run(content_filename)

        # Collect the pseudo-reports first and append them after the loop:
        # the original code appended to ``doc.outputs`` while iterating it,
        # which mutates a list during iteration.
        pseudo_reports = []
        for output in doc.outputs:
            if isinstance(output, (Plot2D, Plot3D)):
                report = Report(id='__plot__' + output.id, name=output.name)

                # gather the unique data generators feeding the plot, keyed by id
                data_generators = {}
                if isinstance(output, Plot2D):
                    for curve in output.curves:
                        data_generators[
                            curve.x_data_generator.id] = curve.x_data_generator
                        data_generators[
                            curve.y_data_generator.id] = curve.y_data_generator

                elif isinstance(output, Plot3D):
                    for surface in output.surfaces:
                        data_generators[surface.x_data_generator.
                                        id] = surface.x_data_generator
                        data_generators[surface.y_data_generator.
                                        id] = surface.y_data_generator
                        data_generators[surface.z_data_generator.
                                        id] = surface.z_data_generator

                for data_generator in data_generators.values():
                    report.data_sets.append(
                        DataSet(
                            id='__data_set__{}_{}'.format(
                                output.id, data_generator.id),
                            name=data_generator.name,
                            label=data_generator.id,
                            data_generator=data_generator,
                        ))

                # deterministic data-set order regardless of dict iteration
                report.data_sets.sort(key=lambda data_set: data_set.id)
                pseudo_reports.append(report)

        doc.outputs.extend(pseudo_reports)

        filename_with_reports_for_plots = os.path.join(
            temp_path, f'simulation_{sedml_name}.sedml')
        SedmlSimulationWriter().run(doc,
                                    filename_with_reports_for_plots,
                                    validate_models_with_languages=False)
Ejemplo n.º 16
0
def exec_sed_doc(omex_file_path, base_out_path):
    """Convert the CSV reports produced for a COMBINE/OMEX archive's SED-ML
    documents into HDF5 reports via ``ReportWriter``.

    Args:
        omex_file_path: path to the COMBINE/OMEX archive to read.
        base_out_path: directory containing a sub-directory of CSV files per
            SED-ML document; also receives the HDF5 output.

    NOTE(review): relies on a module-level ``tmp_dir`` as the extraction
    directory, which is removed at the end of this function.
    """
    # defining archive; manifest validation disabled -- we only need to unpack
    config = Config(VALIDATE_OMEX_MANIFESTS=False)
    archive = CombineArchiveReader().run(in_file=omex_file_path,
                                         out_dir=tmp_dir,
                                         config=config)

    # determine files to execute
    sedml_contents = get_sedml_contents(archive)

    report_results = ReportResults()
    for content in sedml_contents:
        content_filename = os.path.join(tmp_dir, content.location)

        doc = SedmlSimulationReader().run(content_filename)

        for report_filename in glob.glob(
                os.path.join(base_out_path, content.location, '*.csv')):
            report_id = os.path.splitext(os.path.basename(report_filename))[0]

            # read report from CSV file produced by VCell
            data_set_df = pd.read_csv(report_filename).transpose()
            data_set_df.columns = data_set_df.iloc[0]
            data_set_df = data_set_df.drop(data_set_df.iloc[0].name)
            data_set_df = data_set_df.reset_index()
            data_set_df = data_set_df.rename(
                columns={'index': data_set_df.columns.name})
            data_set_df = data_set_df.transpose()
            data_set_df.index.name = None

            # the SED-ML output this CSV belongs to
            report = next(report for report in doc.outputs
                          if report.id == report_id)

            data_set_results = DataSetResults()

            # use isinstance (not `type(...) !=`) so subclasses of the plot
            # types are treated as plots too; computed once for both branches
            is_plot = isinstance(report, (Plot2D, Plot3D))

            if not is_plot:
                # Considering the scenario where it has the datasets in sedml
                for data_set in report.data_sets:
                    data_set_results[data_set.id] = data_set_df.loc[
                        data_set.label, :].to_numpy(dtype='float64')
            else:
                # re-read without a header so the id row is retained, then
                # index the columns by id
                data_set_df = pd.read_csv(report_filename, header=None).T
                data_set_df.columns = data_set_df.iloc[0]
                data_set_df.drop(0, inplace=True)
                data_set_df.reset_index(inplace=True)
                data_set_df.drop('index', axis=1, inplace=True)
                # Considering the scenario where it doesn't have datasets in sedml (pseudo sedml for plots)
                for col in list(data_set_df.columns):
                    data_set_results[col] = data_set_df[col].values

            # append to data structure of report results
            report_results[report_id] = data_set_results

            # save file in desired BioSimulators format(s)
            rel_path = os.path.join(content.location, report.id)

            # drop a leading './' segment, if any, from the HDF5 path
            if len(rel_path.split("./")) > 1:
                rel_path = rel_path.split("./")[1]

            if not is_plot:
                ReportWriter().run(report,
                                   data_set_results,
                                   base_out_path,
                                   rel_path,
                                   format='h5')
            else:
                # replace the plot's data sets with ones derived from the CSV columns
                datasets = []
                for col in list(data_set_df.columns):
                    datasets.append(DataSet(id=col, label=col, name=col))
                report.data_sets = datasets
                ReportWriter().run(report,
                                   data_set_results,
                                   base_out_path,
                                   rel_path,
                                   format='h5')

    # Remove temp directory
    shutil.rmtree(tmp_dir)
Ejemplo n.º 17
0
def handler(body, file=None):
    ''' Validate a COMBINE/OMEX archive

    Args:
        body (:obj:`dict`): dictionary in schema ``ValidateCombineArchiveFileOrUrl`` with keys

            * ``url`` whose value has schema ``Url`` with the URL for a COMBINE/OMEX archive
            * ``omexMetadataFormat`` (:obj:`str`): format of the OMEX Metadata files
            * ``omexMetadataSchema`` (:obj:`str`): schema for validating the OMEX Metadata files
            * ``validateOmexManifest`` (:obj:`bool`, optional): Whether to validate the OMEX manifest file in the archive
            * ``validateSedml`` (:obj:`bool`, optional): Whether to validate the SED-ML files in the archive
            * ``validateSedmlModels`` (:obj:`bool`, optional): Whether to validate the sources of the models in the SED-ML files in the archive
            * ``validateOmexMetadata`` (:obj:`bool`, optional): Whether to validate the OMEX metdata files in the archive according to
                `BioSimulators' conventions <https://docs.biosimulations.org/concepts/conventions/simulation-project-metadata/>`_
            * ``validateImages`` (:obj:`bool`, optional): Whether to validate the images (BMP, GIF, JPEG, PNG, TIFF WEBP) files in the archive

        file (:obj:`werkzeug.datastructures.FileStorage`): COMBINE/OMEX archive file

    Returns:
        ``ValidationReport``: information about the validity or
            lack thereof of a COMBINE/OMEX archive
    '''
    # map request values onto enums, turning bad values into 400 responses
    try:
        omexMetadataInputFormat = OmexMetadataInputFormat(
            body['omexMetadataFormat'])
    except ValueError as exception:
        raise BadRequestException(
            title='`omexMetadataFormat` must be a recognized format.',
            exception=exception)

    try:
        omexMetadataSchema = OmexMetadataSchema(body['omexMetadataSchema'])
    except ValueError as exception:
        raise BadRequestException(
            title='`omexMetadataSchema` must be a recognized schema.',
            exception=exception)

    config = Config(
        OMEX_METADATA_INPUT_FORMAT=omexMetadataInputFormat,
        OMEX_METADATA_SCHEMA=omexMetadataSchema,
        VALIDATE_OMEX_MANIFESTS=body.get('validateOmexManifest', True),
        VALIDATE_SEDML=body.get('validateSedml', True),
        VALIDATE_SEDML_MODELS=body.get('validateSedmlModels', True),
        VALIDATE_OMEX_METADATA=body.get('validateOmexMetadata', True),
        VALIDATE_IMAGES=body.get('validateImages', True),
    )

    # exactly one of `file` and `url` must be provided
    archive_file = file
    archive_url = body.get('url', None)
    if archive_url and archive_file:
        raise BadRequestException(
            title='Only one of `file` or `url` can be used at a time.',
            instance=ValueError(),
        )
    if not archive_url and not archive_file:
        raise BadRequestException(
            title='One of `file` or `url` must be used.',
            instance=ValueError(),
        )

    # create temporary working directory
    temp_dirname = get_temp_dir()
    archive_filename = os.path.join(temp_dirname, 'archive.omex')

    # get COMBINE/OMEX archive
    if archive_file:
        archive_file.save(archive_filename)

    else:
        try:
            # bound the request so a stalled server cannot hang the handler;
            # Timeout is a RequestException and is handled below
            response = requests.get(archive_url, timeout=60)
            response.raise_for_status()
        except requests.exceptions.RequestException as exception:
            title = 'COMBINE/OMEX archive could not be loaded from `{}`'.format(
                archive_url)
            raise BadRequestException(
                title=title,
                instance=exception,
            )

        # save archive to local temporary file
        # (renamed handle to avoid shadowing the `file` argument)
        with open(archive_filename, 'wb') as archive_file_handle:
            archive_file_handle.write(response.content)

    # read archive
    archive_dirname = os.path.join(temp_dirname, 'archive')
    reader = CombineArchiveReader()
    errors = []
    warnings = []
    try:
        archive = reader.run(archive_filename, archive_dirname, config=config)
    except Exception as exception:
        errors = [[
            'The file could not be parsed as a COMBINE/OMEX archive.',
            [[str(exception)]]
        ]]

    # only validate the contents if the archive itself could be read
    if not errors:
        errors, warnings = validate(
            archive,
            archive_dirname,
            formats_to_validate=list(
                CombineArchiveContentFormat.__members__.values()),
            config=config,
        )

    return make_validation_report(errors,
                                  warnings,
                                  filenames=[archive_filename])
Ejemplo n.º 18
0
def handler(body, file=None,
            omexMetadataSchema=OmexMetadataSchema.biosimulations.value):
    ''' Get the metadata about a COMBINE/OMEX archive and its contents

    Args:
        body (:obj:`dict`): dictionary in schema ``GetCombineArchiveMetadataFileOrUrl`` with keys

            * ``url`` whose value has schema ``Url``
              with the URL for a COMBINE/OMEX archive
            * ``omexMetadataFormat`` whose value is a value of :obj:`OmexMetadataInputFormat`

        file (:obj:`werkzeug.datastructures.FileStorage`, optional): COMBINE/OMEX archive file
        omexMetadataSchema (:obj:`str`, optional): schema for validating the OMEX Metadata files

    Returns:
        :obj:`list` of ``BioSimulationsCombineArchiveLocationMetadata``
            or ``RdfTriple``: metadata about a COMBINE/OMEX archive
            and its contents
    '''
    # map request values onto enums, turning bad values into 400 responses
    try:
        omexMetadataInputFormat = OmexMetadataInputFormat(body.get('omexMetadataFormat', 'rdfxml'))
    except ValueError as exception:
        raise BadRequestException(title='`omexMetadataFormat` must be a recognized format.', exception=exception)

    try:
        omexMetadataSchema = OmexMetadataSchema(omexMetadataSchema)
    except ValueError as exception:
        raise BadRequestException(title='`omexMetadataSchema` must be a recognized schema.', exception=exception)

    # exactly one of `file` and `url` must be provided
    archive_file = file
    archive_url = body.get('url', None)
    if archive_url and archive_file:
        raise BadRequestException(
            title='Only one of `file` or `url` can be used at a time.',
            instance=ValueError(),
        )
    if not archive_url and not archive_file:
        raise BadRequestException(
            title='One of `file` or `url` must be used.',
            instance=ValueError(),
        )

    # create temporary working directory
    temp_dirname = get_temp_dir()
    archive_filename = os.path.join(temp_dirname, 'archive.omex')

    # get COMBINE/OMEX archive
    if archive_file:
        archive_file.save(archive_filename)

    else:
        try:
            # bound the request so a stalled server cannot hang the handler;
            # Timeout is a RequestException and is handled below
            response = requests.get(archive_url, timeout=60)
            response.raise_for_status()
        except requests.exceptions.RequestException as exception:
            title = 'COMBINE/OMEX archive could not be loaded from `{}`'.format(
                archive_url)
            raise BadRequestException(
                title=title,
                instance=exception,
            )

        # save archive to local temporary file
        # (renamed handle to avoid shadowing the `file` argument)
        with open(archive_filename, 'wb') as archive_file_handle:
            archive_file_handle.write(response.content)

    # read archive
    archive_dirname = os.path.join(temp_dirname, 'archive')
    try:
        archive = CombineArchiveReader().run(archive_filename, archive_dirname)
    except Exception as exception:
        # return exception
        raise BadRequestException(
            title='`{}` is not a valid COMBINE/OMEX archive'.format(archive_url if archive_url else archive_file.filename),
            instance=exception,
        )

    config = Config(
        OMEX_METADATA_INPUT_FORMAT=omexMetadataInputFormat,
        OMEX_METADATA_SCHEMA=omexMetadataSchema,
    )

    metadata, errors, warnings = read_omex_meta_files_for_archive(archive, archive_dirname, config=config)
    shutil.rmtree(archive_dirname)

    if errors:
        raise BadRequestException(
            title='The metadata for the COMBINE/OMEX archive is not valid.',
            instance=ValueError(),
            validation_report=make_validation_report(errors, warnings, filenames=[archive_filename]),
        )

    # return response
    return metadata
    def test_read_omex_meta_files_for_archive(self):
        """Read metadata spread across several OMEX Metadata files in one archive."""
        # two fixtures that duplicate predicates -> reading should report an error
        duplicated_fixtures = [
            'biosimulations.rdf',
            'biosimulations-with-file-annotations.rdf',
        ]
        for fixture in duplicated_fixtures:
            shutil.copyfile(os.path.join(self.FIXTURE_DIR, fixture),
                            os.path.join(self.dir_name, fixture))

        archive = CombineArchive()
        archive.contents = [
            CombineArchiveContent(
                location=fixture,
                format=CombineArchiveContentFormat.OMEX_METADATA,
            )
            for fixture in duplicated_fixtures
        ]

        config = Config(
            OMEX_METADATA_SCHEMA=data_model.OmexMetadataSchema.biosimulations,
        )
        md, errors, warnings = io.read_omex_meta_files_for_archive(
            archive, self.dir_name, config=config)
        self.assertIn('The COMBINE archive should only contain one instance of predicate',
                      flatten_nested_list_of_strings(errors))
        self.assertEqual(warnings, [])
        self.assertEqual(len(md), 2)
        self.assertEqual(sorted(m['uri'] for m in md),
                         sorted(['.', './sim.sedml/figure1']))

        # the default config reports the same duplication error
        md, errors, warnings = io.read_omex_meta_files_for_archive(
            archive, self.dir_name)
        self.assertIn('The COMBINE archive should only contain one instance of predicate',
                      flatten_nested_list_of_strings(errors))

        # three fixtures that split the metadata cleanly -> no errors
        split_fixtures = [
            'biosimulations-abbrev-third-a.rdf',
            'biosimulations-abbrev-third-b.rdf',
            'biosimulations-abbrev-third-c.rdf',
        ]
        for fixture in split_fixtures:
            shutil.copyfile(os.path.join(self.FIXTURE_DIR, fixture),
                            os.path.join(self.dir_name, fixture))

        archive = CombineArchive()
        archive.contents = [
            CombineArchiveContent(
                location=fixture,
                format=CombineArchiveContentFormat.OMEX_METADATA,
            )
            for fixture in split_fixtures
        ]

        config = Config(
            OMEX_METADATA_SCHEMA=data_model.OmexMetadataSchema.biosimulations,
        )
        md, errors, warnings = io.read_omex_meta_files_for_archive(
            archive, self.dir_name, config=config)
        self.assertEqual(errors, [])
        self.assertEqual(warnings, [])
        self.assertEqual(len(md), 2)
        self.assertEqual(sorted(m['uri'] for m in md),
                         sorted(['.', './sim.sedml/figure1']))
Ejemplo n.º 20
0
def handler(body, file=None):
    ''' Validate metadata about a modeling project or a component of a project

    Args:
        body (:obj:`dict`): dictionary in schema ``ValidateOmexMetadataFileOrUrl`` with keys

            * ``url`` whose value has schema ``Url`` with the URL for a model file
            * ``format`` (:obj:`str`): format of the metadata
            * ``schema`` (:obj:`str`): schema to use to validate the metadata

        file (:obj:`werkzeug.datastructures.FileStorage`): OMEX Metadata file

    Returns:
        ``ValidationReport``: information about the validity or
            lack thereof of the metadata
    '''
    # map request values onto enums, turning bad values into 400 responses.
    # Previously an invalid value raised an unhandled ValueError (HTTP 500),
    # inconsistent with the other handlers. Locals renamed so `format` no
    # longer shadows the builtin.
    try:
        metadata_format = OmexMetadataInputFormat(body['format'])
    except ValueError as exception:
        raise BadRequestException(
            title='`format` must be a recognized format.',
            exception=exception)

    try:
        metadata_schema = OmexMetadataSchema(body['schema'])
    except ValueError as exception:
        raise BadRequestException(
            title='`schema` must be a recognized schema.',
            exception=exception)

    # exactly one of `file` and `url` must be provided
    metadata_file = file
    metadata_url = body.get('url', None)
    if metadata_url and metadata_file:
        raise BadRequestException(
            title='Only one of `file` or `url` can be used at a time.',
            instance=ValueError(),
        )
    if not metadata_url and not metadata_file:
        raise BadRequestException(
            title='One of `file` or `url` must be used.',
            instance=ValueError(),
        )

    # create temporary file
    metadata_filename = get_temp_file()

    # get metadata
    if metadata_file:
        metadata_file.save(metadata_filename)

    else:
        try:
            # bound the request so a stalled server cannot hang the handler;
            # Timeout is a RequestException and is handled below
            response = requests.get(metadata_url, timeout=60)
            response.raise_for_status()
        except requests.exceptions.RequestException as exception:
            title = 'Metadata could not be loaded from `{}`'.format(
                metadata_url)
            raise BadRequestException(
                title=title,
                instance=exception,
            )

        # save metadata to local temporary file
        # (renamed handle to avoid shadowing the `file` argument)
        with open(metadata_filename, 'wb') as metadata_file_handle:
            metadata_file_handle.write(response.content)

    # validate metadata
    config = Config(
        OMEX_METADATA_INPUT_FORMAT=metadata_format,
        OMEX_METADATA_SCHEMA=metadata_schema,
    )
    _, errors, warnings = read_omex_meta_file(metadata_filename, config=config)
    return make_validation_report(errors, warnings, filenames=[metadata_filename])