def do_provider(self, loc):
    provider = loc.provider
    base_url = loc.url
    datafile, replica = generate_datafile("1/1/3", self.dataset, "Hi mum")
    self.assertEqual(replica.verify(allowEmptyChecksums=True), True)
    target_replica = Replica()
    target_replica.datafile = datafile
    target_replica.location = loc
    url = provider.generate_url(target_replica)
    self.assertEqual(url, base_url + '1/1/3')
    target_replica.url = url
    provider.put_file(replica, target_replica)
    self.assertEqual(replica.location.provider.get_file(replica).read(),
                     "Hi mum")
    self.assertEqual(provider.get_file(target_replica).read(), "Hi mum")
    self.assertEqual(provider.get_length(target_replica), 6)
    try:
        self.maxDiff = None
        self.assertEqual(
            provider.get_metadata(target_replica),
            {'sha512sum': '2274cc8c16503e3d182ffaa835c543b' +
                          'ce278bc8fc971f3bf38b94b4d9db44cd89c8f36d4006e' +
                          '5abea29bc05f7f0ea662cb4b0e805e56bbce97f00f94e' +
                          'a6e6498',
             'md5sum': '3b6b51114c3d0ad347e20b8e79765951',
             'length': '6'})
    except NotImplementedError:
        pass
    provider.remove_file(target_replica)
    with self.assertRaises(TransferError):
        provider.get_length(target_replica)
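The helper above takes the transfer Location as a parameter, so a concrete test method per configured location can reuse it. A minimal sketch of such a wrapper, assuming a location registered under the name 'test' as in testMirror below; the method name is illustrative, not taken from the source:

def testTransferProvider(self):
    # Hypothetical wrapper test: run the generic provider checks against
    # the 'test' location configured for the test suite.
    self.do_provider(Location.get_location('test'))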
def testMirror(self):
    dest = Location.get_location('test')
    datafile, replica = generate_datafile(None, self.dataset, "Hi granny")
    path = datafile.get_absolute_filepath()
    self.assertTrue(os.path.exists(path))

    dummy_replica = Replica()
    dummy_replica.datafile = datafile
    dummy_replica.location = Location.objects.get(name='test')
    dummy_replica.url = dummy_replica.generate_default_url()
    with self.assertRaises(TransferError):
        dest.provider.get_length(dummy_replica)

    self.assertTrue(migrate_replica(replica, dest, mirror=True))
    datafile = Dataset_File.objects.get(id=datafile.id)
    self.assertTrue(datafile.is_local())
    self.assertEqual(dest.provider.get_length(dummy_replica), 9)
def create_staging_datafile(filepath, username, dataset_id):
    from tardis.tardis_portal.models import Dataset_File, Dataset, Replica, \
        Location

    dataset = Dataset.objects.get(id=dataset_id)

    url, size = get_staging_url_and_size(username, filepath)
    datafile = Dataset_File(dataset=dataset,
                            filename=path.basename(filepath),
                            size=size)
    replica = Replica(datafile=datafile,
                      protocol='staging',
                      url=url,
                      location=Location.get_location('staging'))
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    replica.datafile = datafile
    replica.save()
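Because create_staging_datafile needs only a staged file path, the owning username and a dataset id, it can be called directly from a view or an asynchronous task once those values are known. A minimal sketch of such a call; the staged path and the wrapper name are placeholders, not values from the source:

def ingest_staged_file(request, dataset_id):
    # Hypothetical caller: 'scans/run42.dat' is a placeholder staged path
    # relative to the user's staging area.
    create_staging_datafile('scans/run42.dat', request.user.username,
                            dataset_id)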
def _create_datafile():
    user = User.objects.create_user("testuser", "*****@*****.**", "pwd")
    user.save()
    UserProfile(user=user).save()
    Location.force_initialize()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test",
                                           created_by=user,
                                           public_access=full_access)
    experiment.save()
    ObjectACL(content_object=experiment,
              pluginId="django_user",
              entityId=str(user.id),
              isOwner=True,
              canRead=True,
              canWrite=True,
              canDelete=True,
              aclOwnershipType=ObjectACL.OWNER_OWNED).save()

    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = Dataset_File(dataset=dataset,
                            size=os.path.getsize(tempfile.file.name),
                            filename="iiif_named_file")
    replica = Replica(datafile=datafile,
                      url=write_uploaded_file_to_dataset(dataset, tempfile),
                      location=Location.get_default_location())
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    replica.datafile = datafile
    replica.save()
    return datafile
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset

    :param request: an HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset id
    :type dataset_id: integer
    :returns: a JSON response indicating success
    :rtype: :class:`django.http.HttpResponse`
    """
    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')

    if request.method == 'POST':
        logger.debug('got POST')
        for key, val in request.POST.items():
            splits = val.split(",")
            for url in splits:
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    pass
                else:
                    picked_file = fp.get_file()
                    filepath = write_uploaded_file_to_dataset(dataset,
                                                              picked_file)
                    datafile = Dataset_File(dataset=dataset,
                                            filename=picked_file.name,
                                            size=picked_file.size)
                    replica = Replica(datafile=datafile,
                                      url=filepath,
                                      protocol='',
                                      location=Location.get_default_location())
                    replica.verify(allowEmptyChecksums=True)
                    datafile.save()
                    replica.datafile = datafile
                    replica.save()

    return HttpResponse(json.dumps({"result": True}))
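fpupload reads each POST value as a comma-separated list of Filepicker URLs, so it can be exercised without URL routing by handing it a request built with Django's RequestFactory. A sketch under that assumption; the form field name 'files', the Filepicker file handles and the dataset object are illustrative placeholders:

import json
from django.test import RequestFactory

def upload_picked_files(dataset):
    # Hypothetical driver: the field name and URLs below are placeholders.
    request = RequestFactory().post('/upload/', {
        'files': 'https://www.filepicker.io/api/file/AAAA,'
                 'https://www.filepicker.io/api/file/BBBB'})
    response = fpupload(request, dataset.id)
    return json.loads(response.content)['result']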
def migrate_replica(replica, location, noRemove=False, mirror=False):
    """
    Migrate the replica to a different storage location.  The overall
    effect will be that the datafile will be stored at the new location
    and removed from the current location, and the datafile metadata will
    be updated to reflect this.
    """
    from tardis.tardis_portal.models import Replica, Location

    with transaction.commit_on_success():
        replica = Replica.objects.select_for_update().get(pk=replica.pk)
        source = Location.get_location(replica.location.name)

        if not replica.verified or location.provider.trust_length:
            raise MigrationError('Only verified datafiles can be migrated'
                                 ' to this destination')

        filename = replica.get_absolute_filepath()
        try:
            newreplica = Replica.objects.get(datafile=replica.datafile,
                                             location=location)
            created_replica = False
            # We've most likely mirrored this file previously.  But if
            # we are about to delete the source Replica, we need to check
            # that the target Replica still verifies.
            if not mirror and \
                    not check_file_transferred(newreplica, location):
                raise MigrationError('Previously mirrored / migrated Replica'
                                     ' no longer verifies locally!')
        except Replica.DoesNotExist:
            newreplica = Replica()
            newreplica.location = location
            newreplica.datafile = replica.datafile
            newreplica.protocol = ''
            newreplica.stay_remote = \
                location != Location.get_default_location()
            newreplica.verified = False
            url = location.provider.generate_url(newreplica)

            if newreplica.url == url:
                # We should get here ...
                raise MigrationError('Cannot migrate a replica to its'
                                     ' current location')

            newreplica.url = url
            location.provider.put_file(replica, newreplica)

            verified = False
            try:
                verified = check_file_transferred(newreplica, location)
            except:
                # FIXME - should we always do this?
                location.provider.remove_file(newreplica)
                raise

            newreplica.verified = verified
            newreplica.save()
            logger.info('Transferred file %s for replica %s' %
                        (filename, replica.id))
            created_replica = True

        if mirror:
            return created_replica

        # FIXME - do this more reliably ...
        replica.delete()

        if not noRemove:
            source.provider.remove_file(replica)
            logger.info('Removed local file %s for replica %s' %
                        (filename, replica.id))

        return True
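migrate_replica is the same entry point testMirror exercises: with mirror=True the source replica is kept and the return value reports whether a new target replica had to be created, while the default call removes the source copy once the transferred replica verifies. A short usage sketch; the 'archive' location name is a placeholder, not taken from the source:

dest = Location.get_location('archive')  # placeholder location name

# Mirror: copy the file to dest but keep the existing replica in place.
migrate_replica(replica, dest, mirror=True)

# Migrate: move the file to dest and remove the source copy once verified.
migrate_replica(replica, dest)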