def test_version_rest_publish_zarr(
    api_client,
    user: User,
    draft_version: Version,
    draft_asset_factory,
    zarr_archive_factory,
):
    """Publishing a draft that contains a zarr-backed asset is rejected with a 400."""
    assign_perm('owner', user, draft_version.dandiset)
    api_client.force_authenticate(user=user)

    archive = zarr_archive_factory(dandiset=draft_version.dandiset)
    zarr_backed_asset: Asset = draft_asset_factory(zarr=archive, blob=None)
    blob_backed_asset: Asset = draft_asset_factory()
    for new_asset in (zarr_backed_asset, blob_backed_asset):
        draft_version.assets.add(new_asset)

    # Run validation so both assets and the version reach the `VALID` state
    tasks.validate_asset_metadata(zarr_backed_asset.id)
    tasks.validate_asset_metadata(blob_backed_asset.id)
    tasks.validate_version_metadata(draft_version.id)
    draft_version.refresh_from_db()
    assert draft_version.valid

    publish_url = (
        f'/api/dandisets/{draft_version.dandiset.identifier}'
        f'/versions/{draft_version.version}/publish/'
    )
    resp = api_client.post(publish_url)
    assert resp.status_code == 400
    assert resp.json() == ['Cannot publish dandisets which contain zarrs']
def import_versions_from_response(api_url: str, version_api_response: dict, dandiset: Dandiset):
    """Import versions given a response from /api/dandisets/{identifier}/versions/.

    Follows API pagination iteratively (the original recursed once per page,
    which can exhaust the call stack for dandisets with very many versions).
    Each imported version is saved with PENDING status.
    """
    while True:
        for result in version_api_response['results']:
            # Fetch the full metadata document for this version
            metadata = requests.get(
                urljoin(
                    api_url,
                    f'/api/dandisets/{result["dandiset"]["identifier"]}/versions/{result["version"]}/',
                )
            ).json()
            click.echo(f' Importing version "{result["version"]}"')
            version = Version(
                dandiset=dandiset,
                name=result['name'],
                version=result['version'],
                doi=result.get('doi'),  # doi may be absent for drafts
                status=Version.Status.PENDING,
                metadata=metadata,
            )
            version.save()

        # Handle API pagination: 'next' is an absolute URL or None on the last page
        next_url = version_api_response.get('next')
        if not next_url:
            break
        version_api_response = requests.get(next_url).json()
def test_draft_version_metadata_computed(draft_version: Version):
    """Saving a draft version injects all computed fields into its metadata."""
    base_metadata = {'schemaVersion': settings.DANDI_SCHEMA_VERSION}
    draft_version.metadata = base_metadata

    # Saving triggers the computed-property injection
    draft_version.save()

    dandiset_id = draft_version.dandiset.identifier
    expected_metadata = {
        **base_metadata,
        'manifestLocation': [
            f'{settings.DANDI_API_URL}/api/dandisets/{dandiset_id}/versions/draft/assets/'
        ],
        'name': draft_version.name,
        'identifier': f'DANDI:{dandiset_id}',
        'version': draft_version.version,
        'id': f'DANDI:{dandiset_id}/{draft_version.version}',
        'url': f'{settings.DANDI_WEB_APP_URL}/dandiset/{dandiset_id}/{draft_version.version}',
        'repository': settings.DANDI_WEB_APP_URL,
        'dateCreated': draft_version.dandiset.created.isoformat(),
        '@context': (
            'https://raw.githubusercontent.com/dandi/schema/master/releases/'
            f'{settings.DANDI_SCHEMA_VERSION}/context.json'
        ),
        'assetsSummary': {
            'numberOfBytes': 0,
            'numberOfFiles': 0,
            'schemaKey': 'AssetsSummary',
        },
    }
    # citation is derived from the rest of the expected document
    expected_metadata['citation'] = draft_version.citation(expected_metadata)
    assert draft_version.metadata == expected_metadata
def create_dev_dandiset(name: str, owner: str):
    """Create a development dandiset with one tiny text asset, owned by *owner* (an email)."""
    owner = User.objects.get(email=owner)

    # Create a new dandiset and grant ownership
    dandiset = Dandiset()
    dandiset.save()
    dandiset.add_owner(owner)

    # Draft version with a minimal-but-valid metadata document
    version_metadata = {
        'schemaVersion': settings.DANDI_SCHEMA_VERSION,
        'schemaKey': 'Dandiset',
        'description': 'An informative description',
        'license': ['spdx:CC0-1.0'],
        'contributor': [
            {
                'name': f'{owner.last_name}, {owner.first_name}',
                'email': owner.email,
                'roleName': ['dcite:ContactPerson'],
                'schemaKey': 'Person',
                'affiliation': [],
                'includeInCitation': True,
            },
        ],
    }
    draft_version = Version(
        dandiset=dandiset,
        name=name,
        metadata=version_metadata,
        version='draft',
    )
    draft_version.save()

    # A 20-byte blob; reuse an existing AssetBlob when one with this etag exists
    blob_size = 20
    uploaded_file = SimpleUploadedFile(name='foo/bar.txt', content=b'A' * blob_size)
    etag = '76d36e98f312e98ff908c8c82c8dd623-0'
    try:
        asset_blob = AssetBlob.objects.get(etag=etag)
    except AssetBlob.DoesNotExist:
        asset_blob = AssetBlob(
            blob_id=uuid4(),
            blob=uploaded_file,
            etag=etag,
            size=blob_size,
        )
        asset_blob.save()

    asset_metadata = {
        'schemaVersion': settings.DANDI_SCHEMA_VERSION,
        'encodingFormat': 'text/plain',
        'schemaKey': 'Asset',
    }
    asset = Asset(blob=asset_blob, metadata=asset_metadata, path='foo/bar.txt')
    asset.save()
    draft_version.assets.add(asset)

    # Kick off checksum calculation and validation so the dandiset is immediately usable
    calculate_sha256(blob_id=asset_blob.blob_id)
    validate_asset_metadata(asset_id=asset.id)
    validate_version_metadata(version_id=draft_version.id)
def test_validate_version_metadata(version: Version, asset: Asset):
    """A version with a valid asset passes validation with no errors."""
    version.assets.add(asset)

    tasks.validate_version_metadata(version.id)
    version.refresh_from_db()

    assert version.validation_errors == []
    assert version.status == Version.Status.VALID
def test_validate_version_metadata_malformed_schema_version(version: Version, asset: Asset):
    """An unrecognized schemaVersion marks the version INVALID with a single error."""
    version.assets.add(asset)
    version.metadata['schemaVersion'] = 'xxx'
    version.save()

    tasks.validate_version_metadata(version.id)
    version.refresh_from_db()

    assert version.status == Version.Status.INVALID
    errors = version.validation_errors
    assert len(errors) == 1
    assert errors[0]['message'].startswith('Metadata version xxx is not allowed.')
def test_version_next_published_version_save(mocker, dandiset, published_version_factory):
    """Version strings allocated at the same time must not collide."""
    spy = mocker.spy(Version, 'next_published_version')

    # Publishing allocates a version string via next_published_version exactly once
    published = published_version_factory(dandiset=dandiset)
    spy.assert_called_once()

    # Given an existing version at the current time, a different one should be allocated
    follow_up = Version.next_published_version(dandiset)
    assert published.version != follow_up
def create(self, request):
    """Create a dandiset plus its draft version from posted metadata.

    If the posted metadata carries an ``identifier`` (optionally prefixed with
    ``DANDI:``), the dandiset is created with that explicit id; otherwise a new
    id is allocated. Returns 400 for a malformed identifier or when a dandiset
    with that id already exists; 200 with the dandiset detail on success.
    """
    serializer = VersionMetadataSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)

    # get_or_create already persists a newly created row, so the original
    # `if created: version_metadata.save()` was redundant and has been removed.
    version_metadata, _ = VersionMetadata.objects.get_or_create(
        name=serializer.validated_data['name'],
        metadata=serializer.validated_data['metadata'],
    )

    if 'identifier' in serializer.validated_data['metadata']:
        identifier = serializer.validated_data['metadata']['identifier']
        if identifier.startswith('DANDI:'):
            identifier = identifier[6:]
        try:
            dandiset = Dandiset(id=int(identifier))
        except ValueError:
            return Response(f'Invalid Identifier {identifier}', status=400)
    else:
        dandiset = Dandiset()
    try:
        # Without force_insert, Django will try to UPDATE an existing dandiset if one exists.
        # We want to throw an error if a dandiset already exists.
        dandiset.save(force_insert=True)
    except IntegrityError as e:
        # https://stackoverflow.com/questions/25368020/django-deduce-duplicate-key-exception-from-integrityerror
        # https://www.postgresql.org/docs/13/errcodes-appendix.html
        # Postgres error code 23505 == unique_violation
        if e.__cause__.pgcode == '23505':
            return Response(f'Dandiset {dandiset.identifier} Already Exists', status=400)
        # Bare raise preserves the original traceback (fix: was `raise e`)
        raise
    assign_perm('owner', request.user, dandiset)

    # Create new draft version
    version = Version(dandiset=dandiset, metadata=version_metadata, version='draft')
    version.save()

    serializer = DandisetDetailSerializer(instance=dandiset)
    return Response(serializer.data, status=status.HTTP_200_OK)
def test_validate_version_metadata_malformed_license(version: Version, asset: Asset):
    """A non-array license value is reported as a schema violation on that field."""
    version.assets.add(asset)
    version.metadata['license'] = 'foo'
    version.save()

    tasks.validate_version_metadata(version.id)
    version.refresh_from_db()

    assert version.status == Version.Status.INVALID
    expected_error = {'field': 'license', 'message': "'foo' is not of type 'array'"}
    assert version.validation_errors == [expected_error]
def test_validate_version_metadata_no_description(version: Version, asset: Asset):
    """Removing the required description field fails validation at the document root."""
    version.assets.add(asset)
    del version.metadata['description']
    version.save()

    tasks.validate_version_metadata(version.id)
    version.refresh_from_db()

    assert version.status == Version.Status.INVALID
    expected_error = {'field': '', 'message': "'description' is a required property"}
    assert version.validation_errors == [expected_error]
def test_published_version_metadata_computed(published_version: Version):
    """Saving a published version injects computed fields, including DOI and S3 manifest."""
    base_metadata = {'schemaVersion': settings.DANDI_SCHEMA_VERSION}
    published_version.metadata = base_metadata

    # Saving triggers the computed-property injection
    published_version.save()

    dandiset_id = published_version.dandiset.identifier
    version_str = published_version.version
    expected_metadata = {
        **base_metadata,
        'manifestLocation': [
            f'http://{settings.MINIO_STORAGE_ENDPOINT}/test-dandiapi-dandisets'
            f'/test-prefix/dandisets/{dandiset_id}'
            f'/{version_str}/assets.yaml'
        ],
        'name': published_version.name,
        'identifier': f'DANDI:{dandiset_id}',
        'version': version_str,
        'id': f'DANDI:{dandiset_id}/{version_str}',
        'doi': f'10.80507/dandi.{dandiset_id}/{version_str}',
        'url': f'{settings.DANDI_WEB_APP_URL}/dandiset/{dandiset_id}/{version_str}',
        'repository': settings.DANDI_WEB_APP_URL,
        'dateCreated': published_version.dandiset.created.isoformat(),
        '@context': (
            'https://raw.githubusercontent.com/dandi/schema/master/releases/'
            f'{settings.DANDI_SCHEMA_VERSION}/context.json'
        ),
        'assetsSummary': {
            'numberOfBytes': 0,
            'numberOfFiles': 0,
            'schemaKey': 'AssetsSummary',
        },
    }
    # citation is derived from the rest of the expected document
    expected_metadata['citation'] = published_version.citation(expected_metadata)
    assert published_version.metadata == expected_metadata
def publish(self, request, **kwargs):
    """Publish the selected version: mint a DOI, share assets, and write manifests."""
    draft = self.get_object()
    # TODO @permission_required doesn't work on methods
    # https://github.com/django-guardian/django-guardian/issues/723
    denial = get_40x_or_None(request, ['owner'], draft.dandiset, return_403=True)
    if denial:
        return denial

    published = Version.copy(draft)
    published.doi = doi.create_doi(published)
    published.save()

    # The published version shares the draft's asset rows
    for shared_asset in draft.assets.all():
        published.assets.add(shared_asset)
    published.write_yamls()

    serializer = VersionSerializer(published)
    return Response(serializer.data, status=status.HTTP_200_OK)
def update(self, request, **kwargs):
    """Update the metadata of a version."""
    version: Version = self.get_object()
    if version.version != 'draft':
        return Response(
            'Only draft versions can be modified.',
            status=status.HTTP_405_METHOD_NOT_ALLOWED,
        )

    serializer = VersionMetadataSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)

    # Strip away any computed fields before persisting caller-supplied metadata
    submitted_metadata = Version.strip_metadata(serializer.validated_data['metadata'])

    version.name = serializer.validated_data['name']
    version.metadata = submitted_metadata
    # Any edit sends the version back through validation
    version.status = Version.Status.PENDING
    version.save()

    serializer = VersionDetailSerializer(instance=version)
    return Response(serializer.data, status=status.HTTP_200_OK)
def test_version_rest_publish_assets(
    api_client,
    user: User,
    draft_version: Version,
    draft_asset_factory,
    published_asset_factory,
):
    """Publishing converts draft assets in place and leaves already-published assets untouched."""
    assign_perm('owner', user, draft_version.dandiset)
    api_client.force_authenticate(user=user)

    old_draft_asset: Asset = draft_asset_factory()
    old_published_asset: Asset = published_asset_factory()
    old_published_asset.publish()
    old_published_asset.save()

    assert not old_draft_asset.published
    assert old_published_asset.published

    for existing_asset in (old_draft_asset, old_published_asset):
        draft_version.assets.add(existing_asset)

    # Run validation so both assets and the version reach the `VALID` state
    tasks.validate_asset_metadata(old_draft_asset.id)
    tasks.validate_asset_metadata(old_published_asset.id)
    tasks.validate_version_metadata(draft_version.id)
    draft_version.refresh_from_db()
    assert draft_version.valid

    publish_url = (
        f'/api/dandisets/{draft_version.dandiset.identifier}'
        f'/versions/{draft_version.version}/publish/'
    )
    resp = api_client.post(publish_url)
    assert resp.status_code == 200

    published_version = Version.objects.get(version=resp.data['version'])
    assert published_version.assets.count() == 2
    new_draft_asset: Asset = published_version.assets.get(asset_id=old_draft_asset.asset_id)
    new_published_asset: Asset = published_version.assets.get(
        asset_id=old_published_asset.asset_id
    )

    # The former draft asset should have been modified into a published asset
    assert new_draft_asset.published
    assert new_draft_asset.asset_id == old_draft_asset.asset_id
    assert new_draft_asset.path == old_draft_asset.path
    assert new_draft_asset.blob == old_draft_asset.blob
    expected_publish_metadata = {
        **old_draft_asset.metadata,
        'datePublished': UTC_ISO_TIMESTAMP_RE,
        'publishedBy': {
            'id': URN_RE,
            'name': 'DANDI publish',
            'startDate': UTC_ISO_TIMESTAMP_RE,
            # TODO endDate needs to be defined before publish is complete
            'endDate': UTC_ISO_TIMESTAMP_RE,
            'wasAssociatedWith': [
                {
                    'id': URN_RE,
                    'identifier': 'RRID:SCR_017571',
                    'name': 'DANDI API',
                    'version': '0.1.0',
                    'schemaKey': 'Software',
                }
            ],
            'schemaKey': 'PublishActivity',
        },
    }
    assert new_draft_asset.metadata == expected_publish_metadata

    # The asset that was already published should be completely unchanged
    assert new_published_asset.published
    assert new_published_asset.asset_id == old_published_asset.asset_id
    assert new_published_asset.path == old_published_asset.path
    assert new_published_asset.blob == old_published_asset.blob
    assert new_published_asset.metadata == old_published_asset.metadata
def test_version_rest_publish(api_client, user: User, draft_version: Version, asset: Asset):
    """End-to-end publish: response shape, published metadata, and final draft state."""
    assign_perm('owner', user, draft_version.dandiset)
    api_client.force_authenticate(user=user)
    draft_version.assets.add(asset)

    # Run validation so the version and asset both become `VALID`
    tasks.validate_version_metadata(draft_version.id)
    tasks.validate_asset_metadata(asset.id)
    draft_version.refresh_from_db()
    assert draft_version.valid

    resp = api_client.post(
        f'/api/dandisets/{draft_version.dandiset.identifier}'
        f'/versions/{draft_version.version}/publish/'
    )
    expected_response = {
        'dandiset': {
            'identifier': draft_version.dandiset.identifier,
            'created': TIMESTAMP_RE,
            'modified': TIMESTAMP_RE,
            'contact_person': draft_version.metadata['contributor'][0]['name'],
            'embargo_status': 'OPEN',
        },
        'version': VERSION_ID_RE,
        'name': draft_version.name,
        'created': TIMESTAMP_RE,
        'modified': TIMESTAMP_RE,
        'asset_count': 1,
        'size': draft_version.size,
        'status': 'Valid',
    }
    assert resp.data == expected_response

    published_version = Version.objects.get(version=resp.data['version'])
    assert published_version
    assert draft_version.dandiset.versions.count() == 2

    published_asset: Asset = published_version.assets.get()
    assert published_asset.published
    # The asset should be the same after publishing
    assert asset.asset_id == published_asset.asset_id

    expected_metadata = {
        **draft_version.metadata,
        'publishedBy': {
            'id': URN_RE,
            'name': 'DANDI publish',
            'startDate': UTC_ISO_TIMESTAMP_RE,
            'endDate': UTC_ISO_TIMESTAMP_RE,
            'wasAssociatedWith': [
                {
                    'id': URN_RE,
                    'identifier': 'RRID:SCR_017571',
                    'name': 'DANDI API',
                    # TODO version the API
                    'version': '0.1.0',
                    'schemaKey': 'Software',
                }
            ],
            'schemaKey': 'PublishActivity',
        },
        'datePublished': UTC_ISO_TIMESTAMP_RE,
        'manifestLocation': [
            f'http://{settings.MINIO_STORAGE_ENDPOINT}/test-dandiapi-dandisets'
            f'/test-prefix/dandisets/{draft_version.dandiset.identifier}'
            f'/{published_version.version}/assets.yaml',
        ],
        'identifier': f'DANDI:{draft_version.dandiset.identifier}',
        'version': published_version.version,
        'id': f'DANDI:{draft_version.dandiset.identifier}/{published_version.version}',
        'url': (
            f'{settings.DANDI_WEB_APP_URL}/dandiset/{draft_version.dandiset.identifier}'
            f'/{published_version.version}'
        ),
        'citation': published_version.citation(published_version.metadata),
        'doi': f'10.80507/dandi.{draft_version.dandiset.identifier}/{published_version.version}',
        # Once the assets are linked, assetsSummary should be computed properly
        'assetsSummary': {
            'schemaKey': 'AssetsSummary',
            'numberOfBytes': 100,
            'numberOfFiles': 1,
            'dataStandard': [
                {
                    'schemaKey': 'StandardsType',
                    'identifier': 'RRID:SCR_015242',
                    'name': 'Neurodata Without Borders (NWB)',
                }
            ],
            'approach': [],
            'measurementTechnique': [],
            'variableMeasured': [],
            'species': [],
        },
    }
    assert published_version.metadata == expected_metadata

    draft_version.refresh_from_db()
    assert draft_version.status == Version.Status.PUBLISHED
    assert not draft_version.valid
def version(self):
    """Version string that the next publish of this dandiset would receive."""
    upcoming = Version.next_published_version(self.dandiset)
    return upcoming
def test_version_make_version_nosave(dandiset):
    """Without saving anything, make_version is reproducible and well-formed."""
    first = Version.make_version(dandiset)
    second = Version.make_version(dandiset)

    assert first == second
    assert first == VERSION_ID_RE
def create(self, request: Request):
    """Create a new dandiset.

    Merges caller-supplied metadata over injected defaults (schemaKey,
    schemaVersion, a contact-person contributor, and an access block derived
    from the ``embargo`` query parameter), then normalizes it through the
    pydantic Dandiset model. Explicit identifiers are superuser-only.
    Returns 403 for a non-admin identifier request, 400 for a malformed or
    duplicate identifier, and 200 with the dandiset detail on success.
    """
    serializer = VersionMetadataSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    query_serializer = CreateDandisetQueryParameterSerializer(data=request.query_params)
    query_serializer.is_valid(raise_exception=True)

    if query_serializer.validated_data['embargo']:
        embargo_status = Dandiset.EmbargoStatus.EMBARGOED
    else:
        embargo_status = Dandiset.EmbargoStatus.OPEN

    name = serializer.validated_data['name']
    metadata = serializer.validated_data['metadata']

    # Strip away any computed fields
    metadata = Version.strip_metadata(metadata)

    # Only inject a schemaVersion and default contributor field if they are
    # not specified in the metadata (caller metadata is spread last, so it wins)
    metadata = {
        'schemaKey': 'Dandiset',
        'schemaVersion': settings.DANDI_SCHEMA_VERSION,
        'contributor': [
            {
                'name': f'{request.user.last_name}, {request.user.first_name}',
                'email': request.user.email,
                'roleName': ['dcite:ContactPerson'],
                'schemaKey': 'Person',
                'affiliation': [],
                'includeInCitation': True,
            },
        ],
        # TODO: move this into dandischema
        'access': [
            {
                'schemaKey': 'AccessRequirements',
                'status': 'dandi:OpenAccess'
                if embargo_status == Dandiset.EmbargoStatus.OPEN
                else 'dandi:EmbargoedAccess',
            }
        ],
        **metadata,
    }
    # Run the metadata through the pydantic model to automatically include any boilerplate
    # like the access or repository fields
    metadata = PydanticDandiset.unvalidated(**metadata).json_dict()

    if 'identifier' in serializer.validated_data['metadata']:
        identifier = serializer.validated_data['metadata']['identifier']
        # Choosing an explicit identifier is reserved for admins
        if identifier and not request.user.is_superuser:
            return Response(
                'Creating a dandiset for a given identifier '
                f'({identifier} was provided) is admin only operation.',
                status=403,
            )
        if identifier.startswith('DANDI:'):
            identifier = identifier[6:]
        try:
            dandiset = Dandiset(id=int(identifier), embargo_status=embargo_status)
        except ValueError:
            return Response(f'Invalid Identifier {identifier}', status=400)
    else:
        dandiset = Dandiset(embargo_status=embargo_status)
    try:
        # Without force_insert, Django will try to UPDATE an existing dandiset if one exists.
        # We want to throw an error if a dandiset already exists.
        dandiset.save(force_insert=True)
    except IntegrityError as e:
        # https://stackoverflow.com/questions/25368020/django-deduce-duplicate-key-exception-from-integrityerror
        # https://www.postgresql.org/docs/13/errcodes-appendix.html
        # Postgres error code 23505 == unique_violation
        if e.__cause__.pgcode == '23505':
            return Response(f'Dandiset {dandiset.identifier} Already Exists', status=400)
        # Bare raise preserves the original traceback (fix: was `raise e`)
        raise

    logging.info(
        'Created dandiset %s given request with name=%s and metadata=%s',
        dandiset.identifier,
        name,
        metadata,
    )

    assign_perm('owner', request.user, dandiset)

    # Create new draft version
    version = Version(
        dandiset=dandiset,
        name=name,
        metadata=metadata,
        version='draft',
        status=Version.Status.PENDING,
    )
    version.save()

    serializer = DandisetDetailSerializer(instance=dandiset)
    return Response(serializer.data, status=status.HTTP_200_OK)