Example #1
    def testRemoteFile(self):
        content = urandom(1024)
        with NamedTemporaryFile() as f:
            # Create new Datafile
            datafile = Dataset_File(dataset=self.dataset)
            datafile.filename = 'background_task_testfile'
            datafile.size = len(content)
            datafile.sha512sum = hashlib.sha512(content).hexdigest()
            datafile.url = 'file://' + path.abspath(f.name)
            datafile.save()

            def get_datafile(datafile):
                return Dataset_File.objects.get(id=datafile.id)

            # Check that it won't verify as it stands
            expect(get_datafile(datafile).verified).to_be(False)
            verify_files()
            expect(get_datafile(datafile).verified).to_be(False)
            expect(get_datafile(datafile).is_local()).to_be(False)

            # Fill in the content
            f.write(content)
            f.flush()

            # Check it now verifies
            verify_files()
            expect(get_datafile(datafile).verified).to_be(True)
            expect(get_datafile(datafile).is_local()).to_be(True)
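This test hinges on the stored sha512sum matching the file's eventual content. For reference, a minimal standalone sketch of the same checksum, computed in chunks so large files need not fit in memory (the helper name and chunk size are illustrative, not part of MyTardis):

import hashlib

def sha512_of_file(filepath, chunk_size=65536):
    # Stream the file through hashlib.sha512 and return the hex digest.
    digest = hashlib.sha512()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()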
Example #2
def add_staged_file_to_dataset(rel_filepath, dataset_id, username,
                               mimetype="application/octet-stream"):
    """
    Add a file from the user's staging path to a dataset.
    May be replaced by main code functions; this is a quick and dirty
    hack to get it working.
    """
    originfilepath = os.path.join(get_full_staging_path(username), rel_filepath)
    dataset = Dataset.objects.get(pk=dataset_id)
    newDatafile = Dataset_File()
    newDatafile.dataset = dataset
    newDatafile.size = os.path.getsize(originfilepath)
    newDatafile.protocol = "tardis"
    newDatafile.mimetype = mimetype
    file_dir = "/" + str(dataset.experiment.id) + "/" + str(dataset.id) + "/"
    file_path = file_dir + rel_filepath
    prelim_full_file_path = settings.FILE_STORE_PATH + file_path
    full_file_path = duplicate_file_check_rename(prelim_full_file_path)
    newDatafile.filename = os.path.basename(full_file_path)
    # Store only the path relative to the experiment/dataset directory
    newDatafile.url = "%s://%s" % (
        newDatafile.protocol,
        full_file_path[len(settings.FILE_STORE_PATH) + len(file_dir):])
    if not os.path.exists(os.path.dirname(full_file_path)):
        os.makedirs(os.path.dirname(full_file_path))
    shutil.move(originfilepath, full_file_path)
    newDatafile.save()
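A hypothetical call, assuming the user 'alice' has a staged file scans/image01.tiff under her staging path and a Dataset with primary key 1 exists (all three values are illustrative):

# Hypothetical usage; the staged file and the dataset must already exist.
add_staged_file_to_dataset('scans/image01.tiff', dataset_id=1,
                           username='alice', mimetype='image/tiff')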
Example #4
    def _make_dataset(self, exp, filenames):
        dataset = Dataset(experiment=exp)
        dataset.save()
        for filename in filenames:
            df = Dataset_File(dataset=dataset, size=41, protocol='file')
            df.filename = filename
            df.url = 'file://' + path.join(path.dirname(__file__), 'data',
                                           df.filename)
            df.save()
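A hypothetical call from inside a test case, assuming exp is a saved Experiment and the listed files exist in the data/ directory next to the test module (the filenames are illustrative):

# Hypothetical usage; both files must exist under <test dir>/data/.
self._make_dataset(exp, ['datafile1.txt', 'datafile2.txt'])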
Example #6
def _make_data_file(dataset, filename, content):
    # Write the content to a temporary file, then register that file
    # as a Dataset_File staged at its location.
    # NOTE: mktemp() is deprecated and race-prone; mkstemp() is safer.
    f = mktemp()
    print "Inside make data file ", f
    with open(f, "w+b") as tmpfile:
        tmpfile.write(content)
    df = Dataset_File()
    df.dataset = dataset
    df.filename = filename
    df.url = 'file://' + f
    df.protocol = "staging"
    df.size = len(content)
    df.verify(allowEmptyChecksums=True)
    df.save()
    print "Df ---", df
Example #7
    def testLocalFile(self):
        content = urandom(1024)
        cf = ContentFile(content, 'background_task_testfile')

        # Create new Datafile
        datafile = Dataset_File(dataset=self.dataset)
        datafile.filename = cf.name
        datafile.size = len(content)
        datafile.sha512sum = hashlib.sha512(content).hexdigest()
        datafile.url = write_uploaded_file_to_dataset(self.dataset, cf)
        datafile.save()

        def get_datafile(datafile):
            return Dataset_File.objects.get(id=datafile.id)

        # Check that it's not currently verified
        expect(get_datafile(datafile).verified).to_be(False)

        # Check it verifies
        verify_files()
        expect(get_datafile(datafile).verified).to_be(True)
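ContentFile here comes from django.core.files.base; it wraps an in-memory bytestring as a Django File object, which is what write_uploaded_file_to_dataset consumes above. A minimal standalone sketch (name and content are illustrative):

from django.core.files.base import ContentFile

cf = ContentFile('some bytes', 'example.bin')
print cf.name, cf.size  # -> example.bin 10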
Example #9
def generate_datafile(path, dataset, content=None, size=-1,
                      verify=True, verified=True):
    from tardis.tardis_portal.models import Dataset_File
    datafile = Dataset_File()
    # Normally we use any old string for the datafile path, but some
    # tests require the path to be the same as what 'staging' would use
    if path is None:
        datafile.dataset_id = dataset.id
        datafile.save()
        path = "%s/%s/%s" % (dataset.get_first_experiment().id,
                             dataset.id, datafile.id)

    filepath = os.path.normpath(FILE_STORE_PATH + '/' + path)
    if content:
        try:
            os.makedirs(os.path.dirname(filepath))
            os.remove(filepath)
        except OSError:
            pass
        with open(filepath, 'wb+') as f:
            f.write(content)
    datafile.url = path
    datafile.mimetype = "application/unspecified"
    datafile.filename = os.path.basename(filepath)
    datafile.dataset_id = dataset.id
    if content:
        datafile.size = str(len(content))
    else:
        datafile.size = str(size)
    if verify and content:
        if not datafile.verify(allowEmptyChecksums=True):
            raise RuntimeError('verify failed!?!')
    else:
        datafile.verified = verified
    datafile.save()
    return datafile
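Hypothetical usage from a test fixture, assuming dataset is a saved Dataset with at least one experiment attached (needed for the path=None branch); the path, content, and size are illustrative:

# One verified file with known content, one unverified placeholder.
df1 = generate_datafile('test/path/file1.txt', dataset, content='hello')
df2 = generate_datafile(None, dataset, size=1024, verified=False)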
Example #10
def _create_datafile():
    user = User.objects.create_user("testuser", "*****@*****.**", "pwd")
    user.save()
    UserProfile(user=user).save()
    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access)
    experiment.save()
    ExperimentACL(
        experiment=experiment,
        pluginId="django_user",
        entityId=str(user.id),
        isOwner=True,
        canRead=True,
        canWrite=True,
        canDelete=True,
        aclOwnershipType=ExperimentACL.OWNER_OWNED,
    ).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = Dataset_File(dataset=dataset)
    datafile.size = os.path.getsize(tempfile.file.name)
    datafile.filename = "iiif_named_file"
    datafile.url = write_uploaded_file_to_dataset(dataset, tempfile)
    datafile.verify(allowEmptyChecksums=True)
    datafile.save()
    return datafile
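The Image class in the example above comes from the Wand binding to ImageMagick; 'magick:rose' is ImageMagick's built-in sample image. A standalone sketch of just the image-generation step (the output path is illustrative):

from wand.image import Image

with Image(filename='magick:rose') as img:
    img.format = 'tiff'
    img.save(filename='/tmp/rose.tiff')  # writes a small TIFF test image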