Example #1
0
    def get_local_file_hash(self, path: str, local_storage: Storage) -> str:
        """Create an md5 hash from the contents of the file at ``path``.

        If gzipping is enabled and the file's guessed content type is one of
        the configured gzip content types, return the hash of the gzipped
        content instead (presumably so it matches what is stored remotely —
        see ``get_gzipped_local_file_hash``).
        """
        # Use a context manager so the storage file handle is always
        # released; the previous code left it open, leaking a handle
        # on every call.
        with local_storage.open(path) as f:
            contents = f.read()
        file_hash = hashlib.md5(contents).hexdigest()

        # Check if content should be gzipped and hash gzipped content
        content_type = mimetypes.guess_type(path)[0] or "application/octet-stream"
        if self.use_gzip and content_type in settings.gzip_content_types:
            file_hash = self.get_gzipped_local_file_hash(file_hash, path, contents)

        return file_hash
Example #2
0
def test_write_dandiset_yaml(storage: Storage, version: Version):
    # The task writes through the AssetBlob storage, so point that field
    # at the storage under test before invoking it.
    AssetBlob.blob.field.storage = storage

    write_dandiset_yaml(version)
    rendered = YAMLRenderer().render(version.metadata)

    path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/dandiset.yaml'
    )
    # The file written by the task must match the freshly rendered metadata.
    with storage.open(path) as f:
        written = f.read()
    assert written == rendered
Example #3
0
def test_write_assets_yaml(storage: Storage, version: Version, asset_factory):
    # The task writes through the AssetBlob storage, so point that field
    # at the storage under test before invoking it.
    AssetBlob.blob.field.storage = storage

    # Give the version at least one asset so the YAML has content to check.
    version.assets.add(asset_factory())

    write_assets_yaml(version)
    all_metadata = [asset.metadata for asset in version.assets.all()]
    rendered = YAMLRenderer().render(all_metadata)

    path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/assets.yaml'
    )
    # The file written by the task must match the freshly rendered metadata.
    with storage.open(path) as f:
        written = f.read()
    assert written == rendered
Example #4
0
def test_write_dandiset_yaml_already_exists(storage: Storage,
                                            version: Version):
    # The task writes through the AssetBlob storage, so point that field
    # at the storage under test before invoking it.
    AssetBlob.blob.field.storage = storage

    # Seed the destination with bogus data; the task must overwrite it.
    path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/dandiset.yaml'
    )
    storage.save(path, ContentFile(b'wrong contents'))

    write_dandiset_yaml(version)
    rendered = YAMLRenderer().render(version.metadata)

    # The bogus seed must have been replaced with the rendered metadata.
    with storage.open(path) as f:
        written = f.read()
    assert written == rendered
Example #5
0
def test_write_collection_jsonld(storage: Storage, version: Version, asset):
    # The task writes through the AssetBlob storage, so point that field
    # at the storage under test before invoking it.
    AssetBlob.blob.field.storage = storage

    version.assets.add(asset)

    write_collection_jsonld(version)
    metadata = version.metadata
    rendered = JSONRenderer().render({
        '@context': metadata['@context'],
        'id': metadata['id'],
        '@type': 'prov:Collection',
        'hasMember': [asset.metadata['id']],
    })

    path = (
        f'{settings.DANDI_DANDISETS_BUCKET_PREFIX}'
        f'dandisets/{version.dandiset.identifier}/{version.version}/collection.jsonld'
    )
    # The file written by the task must match the freshly rendered document.
    with storage.open(path) as f:
        written = f.read()
    assert written == rendered