Example #1
0
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset_id
    :type dataset_id: integer
    :returns: JSON response ``{"result": true}``
    :rtype: :class:`django.http.HttpResponse`
    """

    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')

    if request.method == 'POST':
        logger.debug('got POST')
        # Each POST value is a comma-separated list of filepicker URLs.
        for key, val in request.POST.items():
            for url in val.split(","):
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    # Don't silently drop malformed URLs -- log them so
                    # partially failed uploads can be diagnosed.
                    logger.warning('fpupload: skipping invalid URL %r', url)
                else:
                    picked_file = fp.get_file()
                    datafile = DataFile(dataset=dataset,
                                        filename=picked_file.name,
                                        size=picked_file.size)
                    datafile.save()
                    # Assigning file_object stores the file contents via the
                    # DataFile storage machinery.
                    datafile.file_object = picked_file

    return HttpResponse(json.dumps({"result": True}))
Example #2
0
def upload(request, dataset_id):
    """
    Store a single datafile posted in the ``Filedata`` form field.

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: primary key of the target Dataset
    :type dataset_id: integer
    :returns: the literal string 'True'
    :rtype: :class:`django.http.HttpResponse`
    """

    dataset = Dataset.objects.get(id=dataset_id)

    logger.debug('called upload')
    if request.method == 'POST':
        logger.debug('got POST')
        if request.FILES:

            posted = request.FILES['Filedata']
            logger.debug('done upload')
            new_datafile = DataFile(dataset=dataset,
                                    filename=posted.name,
                                    size=posted.size)
            # Checksums are computed later, once the bytes are stored.
            new_datafile.save(require_checksums=False)
            logger.debug('created file')
            new_datafile.file_object = posted
            logger.debug('saved datafile')

    return HttpResponse('True')
Example #3
0
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset_id
    :type dataset_id: integer
    :returns: JSON response ``{"result": true}``
    :rtype: :class:`django.http.HttpResponse`
    """

    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')

    if request.method == 'POST':
        logger.debug('got POST')
        for key, val in request.POST.items():
            # POST values are comma-separated lists of filepicker URLs.
            splits = val.split(",")
            for url in splits:
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    # Log rather than silently skip, so bad URLs are
                    # visible when uploads partially fail.
                    logger.warning('fpupload: skipping invalid URL %r', url)
                else:
                    picked_file = fp.get_file()
                    datafile = DataFile(dataset=dataset,
                                        filename=picked_file.name,
                                        size=picked_file.size)
                    datafile.save()
                    # Storing file_object persists the picked file's bytes.
                    datafile.file_object = picked_file

    return HttpResponse(json.dumps({"result": True}))
def upload(request, dataset_id):
    """
    Accept one uploaded datafile (form field ``Filedata``) for a dataset.

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: primary key of the Dataset to attach the file to
    :type dataset_id: integer
    :returns: the literal string 'True'
    :rtype: :class:`django.http.HttpResponse`
    """

    dataset = Dataset.objects.get(id=dataset_id)

    logger.debug('called upload')
    if request.method == 'POST':
        logger.debug('got POST')
        if request.FILES:

            incoming = request.FILES['Filedata']
            logger.debug('done upload')
            record = DataFile(dataset=dataset,
                              filename=incoming.name,
                              size=incoming.size)
            # Defer checksum computation until the file bytes are stored.
            record.save(require_checksums=False)
            logger.debug('created file')
            record.file_object = incoming
            logger.debug('saved datafile')

    return HttpResponse('True')
Example #5
0
def create_staging_datafile(filepath, username, dataset_id):
    """
    Create a DataFile in dataset *dataset_id* from a file staged on disk.

    :param filepath: path of the staged file
    :type filepath: string
    :param username: owner of the staging area
    :type username: string
    :param dataset_id: primary key of the target Dataset
    :type dataset_id: integer
    """
    from tardis.tardis_portal.models import DataFile, Dataset
    dataset = Dataset.objects.get(id=dataset_id)

    # The staging URL is not needed here; only the size is used.
    _url, size = get_staging_url_and_size(username, filepath)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(filepath),
                        size=size)
    datafile.save()
    # Open in binary mode: staged files are arbitrary data, and a text-mode
    # read would try to decode (and could corrupt) them.
    # TODO(review): the handle is never closed; confirm whether the
    # DataFile.file_object setter consumes it synchronously before adding a
    # `with` block here.
    datafile.file_object = open(filepath, 'rb')
Example #6
0
def create_staging_datafile(filepath, username, dataset_id):
    """
    Create a DataFile in dataset *dataset_id* from a staged file,
    initialising filters first.

    :param filepath: path of the staged file
    :type filepath: string
    :param username: owner of the staging area
    :type username: string
    :param dataset_id: primary key of the target Dataset
    :type dataset_id: integer
    """
    init_filters()
    from tardis.tardis_portal.models import DataFile, Dataset
    dataset = Dataset.objects.get(id=dataset_id)

    # Only the size is used; the staging URL is discarded.
    _url, size = get_staging_url_and_size(username, filepath)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(filepath),
                        size=size)
    datafile.save()
    # Binary mode: staged files may be arbitrary bytes, which text-mode
    # reads would attempt to decode.
    # TODO(review): handle is left open; confirm the file_object setter
    # consumes it synchronously before switching to a `with` block.
    datafile.file_object = open(filepath, 'rb')
Example #7
0
    def test_wrong_size_verification(self):
        """A DataFile whose recorded size disagrees with its actual
        content must fail verification."""
        payload = urandom(1024)
        content_file = ContentFile(payload, 'background_task_testfile')

        datafile = DataFile(dataset=self.dataset)
        datafile.filename = content_file.name
        # Record one byte fewer than the real length to force a mismatch.
        datafile.size = len(payload) - 1
        datafile.sha512sum = hashlib.sha512(payload).hexdigest()
        datafile.save()
        datafile.file_object = content_file
        # Verify synchronously so exceptions aren't swallowed by celery.
        datafile.verify()
        self.assertFalse(datafile.file_objects.get().verified)
Example #8
0
 def _build(dataset, filename, url=None):
     """Return a saved DataFile: backed by a DataFileObject at *url* when
     given, otherwise holding a small in-memory payload."""
     datafile = DataFile(dataset=dataset, filename=filename)
     datafile.save()
     if url is not None:
         from tardis.tardis_portal.models import \
             DataFileObject
         dfo = DataFileObject(datafile=datafile,
                              storage_box=datafile.get_default_storage_box(),
                              uri=url)
         dfo.save()
         return datafile
     datafile.file_object = StringIO('bla')
     return datafile
 def _build(dataset, filename, url=None):
     """Create and return a DataFile; with no *url* it gets an in-memory
     blob, otherwise a DataFileObject pointing at *url*."""
     new_file = DataFile(dataset=dataset, filename=filename)
     new_file.save()
     if url is None:
         new_file.file_object = StringIO('bla')
     else:
         from tardis.tardis_portal.models import \
             DataFileObject
         backing = DataFileObject(datafile=new_file,
                                  storage_box=new_file.get_default_storage_box(),
                                  uri=url)
         backing.save()
     return new_file
    def test_wrong_size_verification(self):
        """Verification must mark the file object unverified when the
        stored size is wrong."""
        data = urandom(1024)
        backing_file = ContentFile(data, 'background_task_testfile')

        # Build a DataFile whose size field is deliberately off by one.
        datafile = DataFile(dataset=self.dataset)
        datafile.filename = backing_file.name
        datafile.size = len(data) - 1
        datafile.sha512sum = hashlib.sha512(data).hexdigest()
        datafile.save()
        datafile.file_object = backing_file
        # Explicit verify() keeps any exception out of celery's hands.
        datafile.verify()
        self.assertFalse(datafile.file_objects.get().verified)
Example #11
0
 def _build(dataset, filename, url=None):
     """Create a DataFile; with no *url* it carries an in-memory payload,
     otherwise a DataFileObject is created for *url* and the filter task
     dispatch is asserted."""
     df = DataFile(dataset=dataset, filename=filename)
     df.save()
     if url is None:
         df.file_object = StringIO(u'bla')
         return df
     backing = DataFileObject(
         datafile=df,
         storage_box=df.get_default_storage_box(),
         uri=url)
     backing.save()
     # With CELERY_ALWAYS_EAGER = True, saving the DFO verifies it
     # immediately, and since USE_FILTERS is overridden to True in this
     # test, that must have attempted to dispatch the filter task.
     self.assertNotEqual(mock_send_task.call_count, 0)
     return df
Example #12
0
def _create_datafile():
    """
    Build a test fixture: a user, a publicly accessible experiment, a
    dataset, and a TIFF DataFile rendered from ImageMagick's built-in
    'rose' image.

    :returns: the saved DataFile
    :rtype: DataFile
    """
    user = User.objects.create_user('testuser', '*****@*****.**', 'pwd')
    user.save()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test",
                                           created_by=user,
                                           public_access=full_access)
    experiment.save()
    ObjectACL(content_object=experiment,
              pluginId='django_user',
              entityId=str(user.id),
              isOwner=True,
              canRead=True,
              canWrite=True,
              canDelete=True,
              aclOwnershipType=ObjectACL.OWNER_OWNED).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Render a TIFF into a temporary uploaded file.
    tempfile = TemporaryUploadedFile('iiif_stored_file', None, None, None)
    with Image(filename='magick:rose') as img:
        img.format = 'tiff'
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = DataFile(dataset=dataset,
                        size=os.path.getsize(tempfile.file.name),
                        filename='iiif_named_file',
                        mimetype='image/tiff')
    compute_md5 = getattr(settings, 'COMPUTE_MD5', True)
    compute_sha512 = getattr(settings, 'COMPUTE_SHA512', True)
    # Read in binary mode ('rb'): the file is a TIFF, so a text-mode read
    # would try to decode arbitrary bytes.  The context manager also
    # guarantees the handle is closed.
    with open(tempfile.file.name, 'rb') as stored:
        checksums = compute_checksums(stored,
                                      compute_md5=compute_md5,
                                      compute_sha512=compute_sha512)
    if compute_md5:
        datafile.md5sum = checksums['md5sum']
    if compute_sha512:
        datafile.sha512sum = checksums['sha512sum']
    datafile.save()
    datafile.file_object = tempfile
    return datafile
Example #13
0
    def testLocalFile(self):
        """verify_dfos() should mark a freshly stored local file as
        verified."""
        payload = urandom(1024)
        content_file = ContentFile(payload, 'background_task_testfile')

        datafile = DataFile(dataset=self.dataset)
        datafile.filename = content_file.name
        datafile.size = len(payload)
        datafile.sha512sum = hashlib.sha512(payload).hexdigest()
        datafile.save()
        datafile.file_object = content_file

        # Storing the file object auto-verifies; undo that so
        # verify_dfos() has something to do.
        dfo = datafile.file_objects.all()[0]
        dfo.verified = False
        dfo.save(update_fields=['verified'])

        # Initially unverified ...
        expect(datafile.verified).to_be(False)
        # ... and verified after the sweep.
        verify_dfos()
        expect(datafile.verified).to_be(True)
    def testLocalFile(self):
        """A local datafile left unverified must pass verify_dfos()."""
        data = urandom(1024)
        backing_file = ContentFile(data, 'background_task_testfile')

        # Create a DataFile with correct size and checksum.
        datafile = DataFile(dataset=self.dataset)
        datafile.filename = backing_file.name
        datafile.size = len(data)
        datafile.sha512sum = hashlib.sha512(data).hexdigest()
        datafile.save()
        datafile.file_object = backing_file

        # Undo the automatic verification triggered by the assignment.
        file_obj = datafile.file_objects.all()[0]
        file_obj.verified = False
        file_obj.save(update_fields=['verified'])

        # Confirm it starts unverified, then verifies cleanly.
        expect(datafile.verified).to_be(False)
        verify_dfos()
        expect(datafile.verified).to_be(True)
Example #15
0
def _create_datafile():
    """
    Build a test fixture: a user, a publicly accessible experiment, a
    dataset, and a TIFF DataFile rendered from ImageMagick's built-in
    'rose' image.

    :returns: the saved DataFile
    :rtype: DataFile
    """
    user = User.objects.create_user("testuser", "*****@*****.**", "pwd")
    user.save()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access)
    experiment.save()
    ObjectACL(
        content_object=experiment,
        pluginId="django_user",
        entityId=str(user.id),
        isOwner=True,
        canRead=True,
        canWrite=True,
        canDelete=True,
        aclOwnershipType=ObjectACL.OWNER_OWNED,
    ).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile rendered as a TIFF.
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = DataFile(
        dataset=dataset, size=os.path.getsize(tempfile.file.name), filename="iiif_named_file", mimetype="image/tiff"
    )
    # Read in binary mode ("rb"): the file is a TIFF, so a text-mode read
    # would try to decode arbitrary bytes.  The context manager also
    # guarantees the handle is closed.
    with open(tempfile.file.name, "rb") as stored:
        checksums = compute_checksums(stored)
    datafile.md5sum = checksums["md5sum"]
    datafile.sha512sum = checksums["sha512sum"]
    datafile.save()
    datafile.file_object = tempfile
    return datafile