def create_datafile(index):
    # Relies on a `dataset` defined in the enclosing test scope.
    testfile = path.join(path.dirname(__file__), 'fixtures',
                         'jeol_sem_test%d.txt' % index)
    size, sha512sum = get_size_and_sha512sum(testfile)

    datafile = Dataset_File(dataset=dataset,
                            filename=path.basename(testfile),
                            size=size,
                            sha512sum=sha512sum)
    datafile.save()

    base_url = 'file://' + path.abspath(path.dirname(testfile))
    location = Location.load_location({
        'name': 'test-jeol',
        'url': base_url,
        'type': 'external',
        'priority': 10,
        'transfer_provider': 'local'})
    replica = Replica(datafile=datafile,
                      url='file://' + path.abspath(testfile),
                      protocol='file',
                      location=location)
    replica.verify()
    replica.save()

    return Dataset_File.objects.get(pk=datafile.pk)
def create_staging_datafile(filepath, username, dataset_id):
    from tardis.tardis_portal.models import Dataset_File, Dataset, Replica, \
        Location
    dataset = Dataset.objects.get(id=dataset_id)

    url, size = get_staging_url_and_size(username, filepath)
    datafile = Dataset_File(dataset=dataset,
                            filename=path.basename(filepath),
                            size=size)
    replica = Replica(datafile=datafile,
                      protocol='staging',
                      url=url,
                      location=Location.get_location('staging'))
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    replica.datafile = datafile
    replica.save()
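# A minimal usage sketch for create_staging_datafile, assuming the staging
# area already holds a file at the given path for the given user; the
# path, username and dataset id here are illustrative, not taken from the
# original source.
def _example_create_staging_datafile():
    create_staging_datafile('/staging/testuser/example.dat',
                            'testuser', dataset_id=1)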
def _create_datafile(): user = User.objects.create_user("testuser", "*****@*****.**", "pwd") user.save() UserProfile(user=user).save() Location.force_initialize() full_access = Experiment.PUBLIC_ACCESS_FULL experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access) experiment.save() ObjectACL( content_object=experiment, pluginId="django_user", entityId=str(user.id), isOwner=True, canRead=True, canWrite=True, canDelete=True, aclOwnershipType=ObjectACL.OWNER_OWNED, ).save() dataset = Dataset() dataset.save() dataset.experiments.add(experiment) dataset.save() # Create new Datafile tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None) with Image(filename="magick:rose") as img: img.format = "tiff" img.save(file=tempfile.file) tempfile.file.flush() datafile = Dataset_File(dataset=dataset, size=os.path.getsize(tempfile.file.name), filename="iiif_named_file") replica = Replica( datafile=datafile, url=write_uploaded_file_to_dataset(dataset, tempfile), location=Location.get_default_location(), ) replica.verify(allowEmptyChecksums=True) datafile.save() replica.datafile = datafile replica.save() return datafile
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset_id
    :type dataset_id: integer
    :returns: a JSON response with ``{"result": true}`` on success
    :rtype: :class:`django.http.HttpResponse`
    """
    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')

    if request.method == 'POST':
        logger.debug('got POST')
        for key, val in request.POST.items():
            splits = val.split(',')
            for url in splits:
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    pass
                else:
                    picked_file = fp.get_file()
                    filepath = write_uploaded_file_to_dataset(dataset,
                                                              picked_file)
                    datafile = Dataset_File(dataset=dataset,
                                            filename=picked_file.name,
                                            size=picked_file.size)
                    replica = Replica(
                        datafile=datafile,
                        url=filepath,
                        protocol='',
                        location=Location.get_default_location())
                    replica.verify(allowEmptyChecksums=True)
                    datafile.save()
                    replica.datafile = datafile
                    replica.save()

    return HttpResponse(json.dumps({'result': True}))
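# A hedged sketch of exercising fpupload from a Django test; the URL
# pattern is an assumption (the route is not shown in the original
# source). Note the view splits each POST value on commas, so several
# Filepicker URLs can travel in a single form field.
def _example_fpupload_post():
    from django.test import Client
    client = Client()
    client.login(username='testuser', password='pwd')
    return client.post(
        '/ajax/dataset/1/fpupload/',  # hypothetical route; dataset_id=1
        {'url': 'https://www.filepicker.io/api/file/AAAA,'
                'https://www.filepicker.io/api/file/BBBB'})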
def _build_datafile(self, testfile, filename, dataset, url,
                    protocol='', checksum=None, size=None, mimetype=''):
    filesize, sha512sum = get_size_and_sha512sum(testfile)
    datafile = Dataset_File(dataset=dataset,
                            filename=filename,
                            mimetype=mimetype,
                            size=str(size if size is not None else filesize),
                            sha512sum=(checksum if checksum else sha512sum))
    datafile.save()

    # Scheme-less URLs use the pre-registered 'local' location; otherwise
    # look up a location matching the URL's base, registering one if needed.
    if urlparse.urlparse(url).scheme == '':
        location = Location.get_location('local')
    else:
        location = Location.get_location_for_url(url)
        if not location:
            location = Location.load_location({
                'name': filename,
                'url': urlparse.urljoin(url, '.'),
                'type': 'external',
                'priority': 10,
                'transfer_provider': 'local'})
    replica = Replica(datafile=datafile,
                      protocol=protocol,
                      url=url,
                      location=location)
    replica.verify()
    replica.save()
    return Dataset_File.objects.get(pk=datafile.pk)
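# A usage sketch for _build_datafile inside a test case; the fixture name
# and dataset are assumptions for illustration only.
def _example_build_datafile(self, dataset):
    testfile = path.join(path.dirname(__file__), 'fixtures', 'example.txt')
    # A file:// URL exercises the lookup/registration branch above; a
    # scheme-less URL would fall back to the 'local' location instead.
    return self._build_datafile(testfile, 'example.txt', dataset,
                                'file://' + path.abspath(testfile),
                                protocol='file')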
def create_datafile(file_path):
    testfile = path.join(path.dirname(__file__), 'fixtures', file_path)
    size, sha512sum = get_size_and_sha512sum(testfile)

    datafile = Dataset_File(dataset=dataset,
                            filename=path.basename(testfile),
                            size=size,
                            sha512sum=sha512sum)
    datafile.save()

    base_url = 'file://' + path.abspath(path.dirname(testfile))
    location = Location.load_location({
        'name': 'test-flexstation',
        'url': base_url,
        'type': 'external',
        'priority': 10,
        'transfer_provider': 'local'})
    replica = Replica(datafile=datafile,
                      url='file://' + path.abspath(testfile),
                      protocol='file',
                      location=location)
    replica.verify()
    replica.save()

    return Dataset_File.objects.get(pk=datafile.pk)
def generate_datafile(path, dataset, content=None, size=-1,
                      verify=True, verified=True):
    '''Generates a datafile AND a replica to hold its contents'''
    from tardis.tardis_portal.models import Dataset_File, Replica, Location
    saved = settings.REQUIRE_DATAFILE_CHECKSUMS
    settings.REQUIRE_DATAFILE_CHECKSUMS = False
    try:
        datafile = Dataset_File()
        if content:
            datafile.size = str(len(content))
        else:
            datafile.size = str(size)
        # Normally we use any old string for the datafile path, but some
        # tests require the path to be the same as what 'staging' would use
        if path is None:
            datafile.dataset_id = dataset.id
            datafile.save()
            path = "%s/%s/%s" % (dataset.get_first_experiment().id,
                                 dataset.id,
                                 datafile.id)
        filepath = os.path.normpath(FILE_STORE_PATH + '/' + path)
        if content:
            try:
                os.makedirs(os.path.dirname(filepath))
                os.remove(filepath)
            except OSError:
                # Directory may already exist / file may be absent
                pass
            gen_file = open(filepath, 'wb+')
            gen_file.write(content)
            gen_file.close()
        datafile.mimetype = "application/unspecified"
        datafile.filename = os.path.basename(filepath)
        datafile.dataset_id = dataset.id
        datafile.save()

        location = _infer_location(path)
        replica = Replica(datafile=datafile,
                          url=path,
                          protocol='',
                          location=location)
        if verify and content:
            if not replica.verify(allowEmptyChecksums=True):
                raise RuntimeError('verify failed!?!')
        else:
            replica.verified = verified
        replica.save()
        return (datafile, replica)
    finally:
        settings.REQUIRE_DATAFILE_CHECKSUMS = saved
def generate_datafile(path, dataset, content=None, size=-1,
                      verify=True, verified=True,
                      verify_checksums_req=False):
    '''Generates a datafile AND a replica to hold its contents'''
    from tardis.tardis_portal.models import Dataset_File, Replica, Location
    saved = settings.REQUIRE_DATAFILE_CHECKSUMS
    settings.REQUIRE_DATAFILE_CHECKSUMS = False
    try:
        datafile = Dataset_File()
        if content:
            datafile.size = str(len(content))
        else:
            datafile.size = str(size)
        # Normally we use any old string for the datafile path, but some
        # tests require the path to be the same as what 'staging' would use
        if path is None:
            datafile.dataset_id = dataset.id
            datafile.save()
            path = "%s/%s/%s" % (dataset.get_first_experiment().id,
                                 dataset.id,
                                 datafile.id)
        filepath = os.path.normpath(settings.FILE_STORE_PATH + '/' + path)
        if content:
            try:
                os.makedirs(os.path.dirname(filepath))
                os.remove(filepath)
            except OSError:
                pass
            gen_file = open(filepath, 'wb+')
            gen_file.write(content)
            gen_file.close()
        datafile.mimetype = "application/unspecified"
        datafile.filename = os.path.basename(filepath)
        datafile.dataset_id = dataset.id
        datafile.save()
        settings.REQUIRE_DATAFILE_CHECKSUMS = verify_checksums_req

        location = _infer_location(path)
        replica = Replica(datafile=datafile,
                          url=path,
                          protocol='',
                          location=location)
        if verify and content:
            if not replica.verify():
                raise RuntimeError('verify failed!?!')
        replica.save()
        replica.verified = verified
        replica.save(update_fields=['verified'])  # force no verification
        return (datafile, replica)
    finally:
        settings.REQUIRE_DATAFILE_CHECKSUMS = saved
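# A minimal sketch of generate_datafile in a test, assuming `dataset` is
# already linked to an experiment (when path is None the staging-style
# path is derived from dataset.get_first_experiment()).
def _example_generate_datafile(dataset):
    datafile, replica = generate_datafile(None, dataset,
                                          content='Hello World!\n')
    assert replica.verified
    return datafile, replica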