Example No. 1
class MRtaskTestCase(TestCase):

    def setUp(self):
        from django.contrib.auth.models import User
        from tardis.tardis_portal.models import Experiment
        from tardis.tardis_portal.models import ExperimentACL
        from tardis.tardis_portal.models import Dataset
        from tardis.apps.mrtardis.mrtask import MRtask
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.experiment = Experiment(approved=True,
                                     title="Test Experiment",
                                     institution_name="Test Institution",
                                     created_by=self.user,
                                     public=False)
        self.experiment.save()
        acl = ExperimentACL(pluginId="django_user",
                            entityId="1",
                            experiment=self.experiment,
                            canRead=True,
                            canWrite=True,
                            canDelete=True,
                            isOwner=True)
        acl.save()
        self.test_dataset = Dataset(experiment=self.experiment,
                                    description="test dataset")
        self.test_dataset.save()
        self.test_mrtask = MRtask(dataset=self.test_dataset)

    def test_mrtask(self):
        self.assertEqual(len(self.test_mrtask.parameters), 0)
Example No. 2
def register_squashfile(exp_id, epn, sq_dir, sq_filename, namespace):
    '''
    example:
    register_squashfile(456, '1234A', '/srv/squashstore', '1234A.squashfs',
        'http://synchrotron.org.au/mx/squashfsarchive/1')
    '''
    dfs = DataFile.objects.filter(filename=sq_filename,
                                  dataset__experiments__id=exp_id)
    if len(dfs) == 1:
        return dfs[0]
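    # Not registered yet: create a dataset on the experiment and add the squashfile as a DataFile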
    e = Experiment.objects.get(id=exp_id)
    ds = Dataset(description="01 SquashFS Archive")
    ds.save()
    ds.experiments.add(e)
    filepath = os.path.join(sq_dir, sq_filename)
    try:
        with open(filepath + '.md5sum', 'r') as md5file:
            md5sum = md5file.read().strip()[:32]
    except IOError:
        print('no md5sum file found')
        return None
    size = os.path.getsize(filepath)
    df = DataFile(md5sum=md5sum,
                  filename=sq_filename,
                  size=str(size),
                  dataset=ds)
    df.save()
    schema = Schema.objects.filter(namespace=namespace)[0]
    ps = DatafileParameterSet(schema=schema, datafile=df)
    ps.save()
    ps.set_param('EPN', epn)
    sbox = StorageBox.objects.get(name='squashstore')
    dfo = DataFileObject(storage_box=sbox, datafile=df, uri=sq_filename)
    dfo.save()
    return df
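A calling sketch that mirrors the docstring above (assumptions: a StorageBox named 'squashstore' and a Schema with the given namespace already exist, and a '1234A.squashfs.md5sum' sidecar file sits next to the archive, since the function looks up all three):

df = register_squashfile(456, '1234A', '/srv/squashstore', '1234A.squashfs',
                         'http://synchrotron.org.au/mx/squashfsarchive/1')
if df is not None:
    assert df.md5sum and df.dataset is not None  # registered and linked to a dataset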
Example No. 3
 def test_datasetwrapper(self):
     from mecat.models import DatasetWrapper, Sample
     sample_desc = "My Description for Sample created in test_datasetwrapper()"
     
     sample = Sample(experiment=self.experiment, description=sample_desc)
     sample.save()
     self.assertEqual(sample.description, sample_desc)
     self.assertEqual(sample.experiment, self.experiment)
      
     from tardis.tardis_portal.models import Dataset
     dataset_desc = "My Description for Dataset created in test_datasetwrapper()"       
     dataset = Dataset(description=dataset_desc, experiment=self.experiment)
     dataset.save()
     sample_from_db = Sample.objects.get(description=sample_desc)
     dataset_from_db = Dataset.objects.get(description=dataset_desc)
     datasetwrapper = DatasetWrapper(sample=sample_from_db, dataset=dataset_from_db)
     datasetwrapper.save()
     self.assertEqual(datasetwrapper.sample, sample_from_db)
     self.assertEqual(datasetwrapper.dataset, dataset_from_db)
     
     datasetwrapper_from_db = DatasetWrapper.objects.get(sample__description=sample_desc)
     self.assertEqual(datasetwrapper_from_db.dataset.pk, dataset_from_db.pk)
     
     
     
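Example No. 4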
    def setUp(self):
        # Create test owner without enough details
        username, email, password = ('testuser',
                                     '*****@*****.**',
                                     'password')
        user = User.objects.create_user(username, email, password)
        profile = UserProfile(user=user, isDjangoAccount=True)
        profile.save()

        Location.force_initialize()

        # Create test experiment and make user the owner of it
        experiment = Experiment(title='Text Experiment',
                                institution_name='Test Uni',
                                created_by=user)
        experiment.save()
        acl = ObjectACL(
            pluginId='django_user',
            entityId=str(user.id),
            content_object=experiment,
            canRead=True,
            canWrite=True,
            canDelete=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED)
        acl.save()

        dataset = Dataset(description='dataset description...')
        dataset.save()
        dataset.experiments.add(experiment)
        dataset.save()

        def create_datafile(filename):
            testfile = path.join(path.dirname(__file__), 'fixtures',
                                 filename)

            size, sha512sum = get_size_and_sha512sum(testfile)

            datafile = Dataset_File(dataset=dataset,
                                    filename=path.basename(testfile),
                                    size=size,
                                    sha512sum=sha512sum)
            datafile.save()
            base_url = 'file://' + path.abspath(path.dirname(testfile))
            location = Location.load_location({
                'name': 'test-grabber', 'url': base_url, 'type': 'external',
                'priority': 10, 'transfer_provider': 'local'})
            replica = Replica(datafile=datafile,
                              url='file://'+path.abspath(testfile),
                              protocol='file',
                              location=location)
            replica.verify()
            replica.save()
            return Dataset_File.objects.get(pk=datafile.pk)

        self.dataset = dataset
        self.datafiles = [create_datafile('data_grabber_test1.admin'),
                          create_datafile('testfile.txt')]
Example No. 5
    def setUp(self):
        # Create test owner without enough details
        username, email, password = ('testuser', '*****@*****.**',
                                     'password')
        user = User.objects.create_user(username, email, password)
        profile = UserProfile(user=user, isDjangoAccount=True)
        profile.save()

        Location.force_initialize()

        # Create test experiment and make user the owner of it
        experiment = Experiment(title='Text Experiment',
                                institution_name='Test Uni',
                                created_by=user)
        experiment.save()
        acl = ObjectACL(
            pluginId='django_user',
            entityId=str(user.id),
            content_object=experiment,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        acl.save()

        dataset = Dataset(description='dataset description...')
        dataset.save()
        dataset.experiments.add(experiment)
        dataset.save()

        def create_datafile(index):
            testfile = path.join(path.dirname(__file__), 'fixtures',
                                 'jeol_sem_test%d.txt' % index)

            size, sha512sum = get_size_and_sha512sum(testfile)

            datafile = Dataset_File(dataset=dataset,
                                    filename=path.basename(testfile),
                                    size=size,
                                    sha512sum=sha512sum)
            datafile.save()
            base_url = 'file://' + path.abspath(path.dirname(testfile))
            location = Location.load_location({
                'name': 'test-jeol',
                'url': base_url,
                'type': 'external',
                'priority': 10,
                'transfer_provider': 'local'
            })
            replica = Replica(datafile=datafile,
                              url='file://' + path.abspath(testfile),
                              protocol='file',
                              location=location)
            replica.verify()
            replica.save()
            return Dataset_File.objects.get(pk=datafile.pk)

        self.dataset = dataset
        self.datafiles = [create_datafile(i) for i in (1, 2)]
Example No. 6
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(0, nosDatafiles):
        df_ = Dataset_File(dataset=ds_, url='http://planet-python.org/' + str(_next_id()))
        df_.save()
    ds_.save()
    return ds_
Example No. 7
 def _make_dataset(self, exp, filenames):
     dataset = Dataset(experiment=exp)
     dataset.save()
     for filename in filenames:
         df = Dataset_File(dataset=dataset, size=41, protocol='file')
         df.filename = filename
         df.url = 'file://' + path.join(path.dirname(__file__), 'data', df.filename)
         df.save()
Example No. 8
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(0, nosDatafiles):
        df_ = Dataset_File(dataset=ds_,
                           url='http://planet-python.org/' + str(_next_id()))
        df_.save()
    ds_.save()
    return ds_
Example No. 9
 def _make_dataset(self, exp, filenames):
     dataset = Dataset(experiment=exp)
     dataset.save()
     for filename in filenames:
         df = Dataset_File(dataset=dataset, size=41, protocol='file')
         df.filename = filename
         df.url = 'file://' + path.join(path.dirname(__file__), 'data',
                                        df.filename)
         df.save()
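Example No. 10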
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(0, nosDatafiles):
        df_ = DataFile(dataset=ds_, filename='file_%d' % i, size='21',
                       sha512sum='bogus')
        df_.save()
    ds_.save()
    return ds_
Example No. 11
    def setUp(self):
        raise SkipTest  # temporarily disabling this feature, needs coding
        from tempfile import mkdtemp, mktemp
        from django.conf import settings
        import os

        # Disconnect post_save signal
        from django.db.models.signals import post_save
        from tardis.tardis_portal.models import Experiment, \
            staging_hook, Dataset, DataFile, DataFileObject, StorageBox
        post_save.disconnect(staging_hook, sender=DataFileObject)

        from django.contrib.auth.models import User
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        try:
            os.makedirs(settings.GET_FULL_STAGING_PATH_TEST)
        except OSError:
            pass
        self.temp = mkdtemp(dir=settings.GET_FULL_STAGING_PATH_TEST)

        self.filepath = mktemp(dir=self.temp)
        content = 'test file'
        with open(self.filepath, "w+b") as f:
            f.write(content)

        # make datafile
        exp = Experiment(title='test exp1',
                         institution_name='monash',
                         created_by=self.user)
        exp.save()

        # make dataset
        dataset = Dataset(description="dataset description...")
        dataset.save()
        dataset.experiments.add(exp)
        dataset.save()

        # create datafile
        df = DataFile(dataset=dataset, size=len(content),
                      filename=path.basename(self.filepath),
                      md5sum='f20d9f2072bbeb6691c0f9c5099b01f3')
        df.save()

        # create replica
        base_url = settings.GET_FULL_STAGING_PATH_TEST
        s_box = StorageBox.get_default_storage(location=base_url)
        dfo = DataFileObject(datafile=df,
                             uri=self.filepath,
                             storage_box=s_box)
        dfo.save()
        self.dfo = dfo
Example No. 12
def generate_dataset(datafiles=[], experiments=[]):
    from tardis.tardis_portal.models import Dataset
    dataset = Dataset()
    dataset.save()
    for df in datafiles:
        df.dataset_id = dataset.id
        df.save()
    for exp in experiments:
        dataset.experiments.add(exp)
    dataset.save()
    return dataset
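A brief usage sketch for the helper above (hypothetical fixtures: exp is a saved Experiment and df an existing DataFile; the helper re-points df at the newly created dataset):

dataset = generate_dataset(datafiles=[df], experiments=[exp])
assert df.dataset_id == dataset.id
assert exp in dataset.experiments.all()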
Example No. 13
def generate_dataset(datafiles=[], experiments=[]):
    from tardis.tardis_portal.models import Dataset
    dataset = Dataset()
    dataset.save()
    for df in datafiles:
        df.dataset_id = dataset.id
        df.save()
    for exp in experiments:
        dataset.experiments.add(exp)
    dataset.save()
    return dataset
Example No. 14
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(0, nosDatafiles):
        df_ = DataFile(dataset=ds_,
                       filename='file_%d' % i,
                       size='21',
                       sha512sum='bogus')
        df_.save()
    ds_.save()
    return ds_
Example No. 15
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(0, nosDatafiles):
        df_ = Dataset_File(dataset=ds_, size='21', sha512sum='bogus')
        df_.save()
        rep_ = Replica(datafile=df_,
                       url='http://planet-python.org/' + str(_next_id()),
                       location=Location.get_default_location())
        rep_.save()
    ds_.save()
    return ds_
Example No. 16
    def setUp(self):
        # Create test owner without enough details
        username, email, password = ('testuser',
                                     '*****@*****.**',
                                     'password')
        user = User.objects.create_user(username, email, password)

        # Create test experiment and make user the owner of it
        experiment = Experiment(title='Text Experiment',
                                institution_name='Test Uni',
                                created_by=user)
        experiment.save()
        acl = ObjectACL(
            pluginId='django_user',
            entityId=str(user.id),
            content_object=experiment,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        acl.save()

        dataset = Dataset(description='dataset description...')
        dataset.save()
        dataset.experiments.add(experiment)
        dataset.save()

        base_path = path.join(path.dirname(__file__), 'fixtures')
        s_box = StorageBox.get_default_storage(location=base_path)

        def create_datafile(index):
            testfile = path.join(base_path, 'middleware_test%d.txt' % index)

            size, sha512sum = get_size_and_sha512sum(testfile)

            datafile = DataFile(dataset=dataset,
                                filename=path.basename(testfile),
                                size=size,
                                sha512sum=sha512sum)
            datafile.save()
            dfo = DataFileObject(
                datafile=datafile,
                storage_box=s_box,
                uri=path.basename(testfile))
            dfo.save()

            if index != 1:
                dfo.verified = False
                dfo.save(update_fields=['verified'])
            return DataFile.objects.get(pk=datafile.pk)

        self.dataset = dataset
        self.datafiles = [create_datafile(i) for i in (1, 2)]
Example No. 17
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(0, nosDatafiles):
        df_ = Dataset_File(dataset=ds_, size='21', sha512sum='bogus')
        df_.save()
        rep_ = Replica(datafile=df_,
                       url='http://planet-python.org/' + str(_next_id()),
                       location=Location.get_default_location())
        rep_.save()
    ds_.save()
    return ds_
Example No. 18
    def test_urls_with_some_content(self):
        # Things that might tend to be in a real live system
        user = '******'
        pwd = User.objects.make_random_password()
        user = User.objects.create(username=user,
                                   email='*****@*****.**',
                                   first_name="Test", last_name="User")
        user.set_password(pwd)
        user.save()
        experiment = Experiment.objects.create(
            title="Test Experiment",
            created_by=user,
            public_access=Experiment.PUBLIC_ACCESS_FULL)
        experiment.save()
        acl = ObjectACL(pluginId='django_user',
                        entityId=str(user.id),
                        content_object=experiment,
                        canRead=True,
                        canWrite=True,
                        canDelete=True,
                        isOwner=True)
        acl.save()
        dataset = Dataset(description="test dataset")
        dataset.save()
        dataset.experiments.add(experiment)
        dataset.save()

        # Test everything works
        c = Client()
        c.login(username=user, password=pwd)
        urls = ['/about/', '/stats/']
        urls += ['/experiment/list/%s' % part
                 for part in ('mine', 'shared', 'public')]
        # urls += ['/experiment/%s/' % part
        #          for part in ('search',)]
        urls += ['/experiment/view/%d/' % experiment.id]
        urls += ['/ajax/experiment/%d/%s' % (experiment.id, tabpane)
                 for tabpane in ('description', 'datasets', 'rights')]
        urls += ['/ajax/datafile_list/%d/' % dataset.id]
        urls += ['/ajax/dataset_metadata/%d/' % dataset.id]

        for u in urls:
            response = c.get(u)
            ensure(response.status_code, 200,
                   "%s should have returned 200 but returned %d"
                   % (u, response.status_code))

        redirect_urls = ['/experiment/list', '/experiment/view/']

        for u in redirect_urls:
            response = c.get(u)
            expect(response.status_code).to_equal(302)
Example No. 19
 def _create_dataset(self):
     user = User.objects.create_user('testuser', '*****@*****.**', 'pwd')
     user.save()
     full_access = Experiment.PUBLIC_ACCESS_FULL
     experiment = Experiment.objects.create(title="Background Test",
                                            created_by=user,
                                            public_access=full_access)
     experiment.save()
     dataset = Dataset()
     dataset.save()
     dataset.experiments.add(experiment)
     dataset.save()
     return dataset
Example No. 20
 def _create_dataset(self):
     user = User.objects.create_user('testuser', '*****@*****.**', 'pwd')
     user.save()
     full_access = Experiment.PUBLIC_ACCESS_FULL
     experiment = Experiment.objects.create(title="Background Test",
                                            created_by=user,
                                            public_access=full_access)
     experiment.save()
     dataset = Dataset()
     dataset.save()
     dataset.experiments.add(experiment)
     dataset.save()
     return dataset
Example No. 21
    def setUp(self):
        # Create test owner without enough details
        username, email, password = ('testuser',
                                     '*****@*****.**',
                                     'password')
        user = User.objects.create_user(username, email, password)
        profile = UserProfile(user=user, isDjangoAccount=True)
        profile.save()

        # Create test experiment and make user the owner of it
        experiment = Experiment(title='Text Experiment',
                                institution_name='Test Uni',
                                created_by=user)
        experiment.save()
        acl = ExperimentACL(
            pluginId='django_user',
            entityId=str(user.id),
            experiment=experiment,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ExperimentACL.OWNER_OWNED,
            )
        acl.save()

        dataset = Dataset(description='dataset description...')
        dataset.save()
        dataset.experiments.add(experiment)
        dataset.save()

        def create_datafile(index):
            testfile = path.join(path.dirname(__file__), 'fixtures',
                                 'jeol_sem_test%d.txt' % index)

            size, sha512sum = get_size_and_sha512sum(testfile)

            datafile = Dataset_File(dataset=dataset,
                                    filename=path.basename(testfile),
                                    url='file://'+path.abspath(testfile),
                                    protocol='file',
                                    size=size,
                                    sha512sum=sha512sum)
            datafile.verify()
            datafile.save()
            return datafile

        self.dataset = dataset
        self.datafiles = [create_datafile(i) for i in (1, 2)]
Example No. 22
    def test_contextual_view(self):
        """
            Given a schema on the dataset, check that an image file is created
        """
        user = _create_test_user()
        license = _create_license()
        exp = _create_test_experiment(user, license)
        ds = Dataset(description='happy snaps of plumage')
        ds.save()
        ds = _create_test_dataset(
            ds, exp.id, {
                "output.dat": 'test data\n',
                "grexp.dat": '1 2\n2 3\n3 7\n',
                "grfinal21.dat": '1 2\n 2 4\n4 9\n'
            })

        sch = Schema(namespace=self.HRMCSCHEMA,
                     name="hrmc_views",
                     type=Schema.DATASET)
        sch.save()

        param = ParameterName(schema=sch,
                              name="plot",
                              full_name="scatterplot",
                              units="image",
                              data_type=ParameterName.FILENAME)
        param.save()

        dps = DatasetParameterSet(schema=sch, dataset=ds)
        dps.save()

        ds.experiments.add(exp)
        ds.save()

        client = Client()
        response = client.get('/dataset/%s' % ds.id)
        self.assertEqual(response.status_code, 200)

        param_sets = get_param_sets(ds)
        self.assertTrue(param_sets)

        dp = DatasetParameter.objects.get(parameterset=param_sets[0],
                                          name=param)

        self.assertTrue(dp)
        self.assertNotEquals(dp.string_value, "")  # ie, it has a filename
Example No. 23
    def test_003_offline_dataset(self, mock_stat):
        """A dataset should be offline if any datafiles are offline"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=0,
                                       st_mtime=datetime.now())
        ds = Dataset(description="Dataset2", instrument=self.inst)
        ds.save()

        df2 = DataFile(dataset=ds, filename="test_file.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri=df2.filename)
        dfo2.save()
        df2.verify()

        self.assertFalse(dataset_online(ds))
Example No. 24
    def test_hrmc_filter(self):
        """
           Make an experiment, load up the grexp file and check that the
           dataset schema is missing, then load up grfinal and check that the
           dataset schema is created
        """
        user = _create_test_user()
        license = _create_license()
        exp = _create_test_experiment(user, license)
        ds = Dataset(description='happy snaps of plumage')
        ds.save()
        _create_test_dataset(ds, exp.id, {
            "output.dat": 'hello',
            "grexp.dat": '2 5\n6 15\n'
        })
        ds.experiments.add(exp)
        ds.save()

        sch = Schema(namespace=self.HRMCSCHEMA,
                     name="hrmc_views",
                     type=Schema.DATASET)
        sch.save()

        param = ParameterName(schema=sch,
                              name="plot",
                              full_name="scatterplot",
                              units="image",
                              data_type=ParameterName.FILENAME)
        param.save()

        param_sets = get_param_sets(ds)
        self.assertEquals(list(param_sets), [])

        _create_test_dataset(ds, exp.id, {'grfinal21.dat': "1 3\n5 14\n"})

        df2 = Dataset_File(dataset=ds, url='path/grfinal21.dat')
        df2.save()

        h = hrmc.HRMCOutput('HRMC', self.HRMCSCHEMA)
        h(sender=Dataset_File, instance=df2)

        param_sets = get_param_sets(ds)
        self.assertEquals([x.schema.namespace for x in param_sets],
                          [self.HRMCSCHEMA])
Example No. 25
def _create_datafile():
    user = User.objects.create_user('testuser', '*****@*****.**', 'pwd')
    user.save()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test",
                                           created_by=user,
                                           public_access=full_access)
    experiment.save()
    ObjectACL(content_object=experiment,
              pluginId='django_user',
              entityId=str(user.id),
              isOwner=True,
              canRead=True,
              canWrite=True,
              canDelete=True,
              aclOwnershipType=ObjectACL.OWNER_OWNED).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile('iiif_stored_file', None, None, None)
    with Image(filename='magick:rose') as img:
        img.format = 'tiff'
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = DataFile(dataset=dataset,
                        size=os.path.getsize(tempfile.file.name),
                        filename='iiif_named_file',
                        mimetype='image/tiff')
    compute_md5 = getattr(settings, 'COMPUTE_MD5', True)
    compute_sha512 = getattr(settings, 'COMPUTE_SHA512', True)
    checksums = compute_checksums(open(tempfile.file.name, 'r'),
                                  compute_md5=compute_md5,
                                  compute_sha512=compute_sha512)
    if compute_md5:
        datafile.md5sum = checksums['md5sum']
    if compute_sha512:
        datafile.sha512sum = checksums['sha512sum']
    datafile.save()
    datafile.file_object = tempfile
    return datafile
Example No. 26
def _create_datafile():
    user = User.objects.create_user('testuser', '*****@*****.**', 'pwd')
    user.save()
    UserProfile(user=user).save()

    Location.force_initialize()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test",
                                           created_by=user,
                                           public_access=full_access)
    experiment.save()
    ObjectACL(content_object=experiment,
              pluginId='django_user',
              entityId=str(user.id),
              isOwner=True,
              canRead=True,
              canWrite=True,
              canDelete=True,
              aclOwnershipType=ObjectACL.OWNER_OWNED).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile('iiif_stored_file', None, None, None)
    with Image(filename='magick:rose') as img:
        img.format = 'tiff'
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = Dataset_File(dataset=dataset,
                            size=os.path.getsize(tempfile.file.name),
                            filename='iiif_named_file')
    replica = Replica(datafile=datafile,
                      url=write_uploaded_file_to_dataset(dataset, tempfile),
                      location=Location.get_default_location())
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    replica.datafile = datafile
    replica.save()
    return datafile
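Example No. 27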
    def test_contextual_view(self):
        """
            Given a schema on the dataset, check that an image file is created
        """
        user = _create_test_user()
        license = _create_license()
        exp = _create_test_experiment(user, license)
        ds = Dataset(description='happy snaps of plumage')
        ds.save()
        ds = _create_test_dataset(ds, exp.id, {
            "output.dat": 'test data\n',
            "grexp.dat": '1 2\n2 3\n3 7\n',
            "grfinal21.dat": '1 2\n 2 4\n4 9\n'})

        sch = Schema(namespace=self.HRMCSCHEMA,
            name="hrmc_views", type=Schema.DATASET)
        sch.save()

        param = ParameterName(schema=sch, name="plot",
            full_name="scatterplot", units="image",
            data_type=ParameterName.FILENAME
            )
        param.save()

        dps = DatasetParameterSet(schema=sch, dataset=ds)
        dps.save()

        ds.experiments.add(exp)
        ds.save()

        client = Client()
        response = client.get('/dataset/%s' % ds.id)
        self.assertEqual(response.status_code, 200)

        param_sets = get_param_sets(ds)
        self.assertTrue(param_sets)

        dp = DatasetParameter.objects.get(parameterset=param_sets[0],
            name=param)

        self.assertTrue(dp)
        self.assertNotEquals(dp.string_value, "")  # ie, it has a filename
Example No. 28
def _create_datafile():
    user = User.objects.create_user("testuser", "*****@*****.**", "pwd")
    user.save()
    UserProfile(user=user).save()

    Location.force_initialize()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access)
    experiment.save()
    ObjectACL(
        content_object=experiment,
        pluginId="django_user",
        entityId=str(user.id),
        isOwner=True,
        canRead=True,
        canWrite=True,
        canDelete=True,
        aclOwnershipType=ObjectACL.OWNER_OWNED,
    ).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = Dataset_File(dataset=dataset, size=os.path.getsize(tempfile.file.name), filename="iiif_named_file")
    replica = Replica(
        datafile=datafile,
        url=write_uploaded_file_to_dataset(dataset, tempfile),
        location=Location.get_default_location(),
    )
    replica.verify(allowEmptyChecksums=True)
    datafile.save()
    replica.datafile = datafile
    replica.save()
    return datafile
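Example No. 29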
    def test_hrmc_filter(self):
        """
           Make an experiment, load up the grexp file and check that the
           dataset schema is missing, then load up grfinal and check that the
           dataset schema is created
        """
        user = _create_test_user()
        license = _create_license()
        exp = _create_test_experiment(user, license)
        ds = Dataset(description='happy snaps of plumage')
        ds.save()
        _create_test_dataset(ds, exp.id,
            {"output.dat": 'hello', "grexp.dat": '2 5\n6 15\n'})
        ds.experiments.add(exp)
        ds.save()

        sch = Schema(namespace=self.HRMCSCHEMA,
            name="hrmc_views", type=Schema.DATASET)
        sch.save()

        param = ParameterName(schema=sch, name="plot",
            full_name="scatterplot", units="image",
            data_type=ParameterName.FILENAME
            )
        param.save()

        param_sets = get_param_sets(ds)
        self.assertEquals(list(param_sets), [])

        _create_test_dataset(ds, exp.id, {'grfinal21.dat': "1 3\n5 14\n"})

        df2 = Dataset_File(dataset=ds, url='path/grfinal21.dat')
        df2.save()

        h = hrmc.HRMCOutput('HRMC', self.HRMCSCHEMA)
        h(sender=Dataset_File, instance=df2)

        param_sets = get_param_sets(ds)
        self.assertEquals([x.schema.namespace for x in param_sets],
            [self.HRMCSCHEMA])
Example No. 30
 def test_deleting_dfo_without_uri(self):
     dataset = Dataset(description="dataset description")
     dataset.save()
     save1 = settings.REQUIRE_DATAFILE_SIZES
     save2 = settings.REQUIRE_DATAFILE_CHECKSUMS
     try:
         settings.REQUIRE_DATAFILE_SIZES = False
         settings.REQUIRE_DATAFILE_CHECKSUMS = False
         datafile = DataFile(dataset=dataset, filename='test1.txt')
         datafile.save()
     finally:
         settings.REQUIRE_DATAFILE_SIZES = save1
         settings.REQUIRE_DATAFILE_CHECKSUMS = save2
     dfo = DataFileObject(
             datafile=datafile,
             storage_box=datafile.get_default_storage_box(),
             uri=None)
     dfo.save()
     self.assertIsNone(dfo.uri)
     self.assertIsNotNone(dfo.id)
     dfo.delete()
     self.assertIsNone(dfo.id)
Example No. 31
def add_dataset(request, experiment_id):
    if not has_experiment_write(request, experiment_id):
        return HttpResponseForbidden()

    # Process form or prepopulate it
    if request.method == 'POST':
        form = DatasetForm(request.POST)
        if form.is_valid():
            dataset = Dataset()
            dataset.description = form.cleaned_data['description']
            dataset.save()
            experiment = Experiment.objects.get(id=experiment_id)
            dataset.experiments.add(experiment)
            dataset.save()
            return _redirect_303('tardis_portal.view_dataset', dataset.id)
    else:
        form = DatasetForm()

    c = {'form': form}
    return HttpResponse(
        render_response_index(request,
                              'tardis_portal/add_or_edit_dataset.html', c))
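For context, a minimal DatasetForm that the view above could validate against is sketched here; this is an illustrative assumption only (the actual DatasetForm in the tardis_portal app may define more fields):

from django import forms

class DatasetForm(forms.Form):
    # the only field add_dataset() reads from cleaned_data
    description = forms.CharField(max_length=400)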
Example No. 32
def add_dataset(request, experiment_id):
    if not has_experiment_write(request, experiment_id):
        return HttpResponseForbidden()

    # Process form or prepopulate it
    if request.method == 'POST':
        form = DatasetForm(request.POST)
        if form.is_valid():
            dataset = Dataset()
            dataset.description = form.cleaned_data['description']
            dataset.save()
            experiment = Experiment.objects.get(id=experiment_id)
            dataset.experiments.add(experiment)
            dataset.save()
            return _redirect_303('tardis_portal.view_dataset',
                                 dataset.id)
    else:
        form = DatasetForm()

    c = {'form': form}
    return HttpResponse(render_response_index(request,
                        'tardis_portal/add_or_edit_dataset.html', c))
Example No. 33
    def test_dataset(self):
        exp = Experiment(title='test exp1',
                         institution_name='monash',
                         created_by=self.user)

        exp.save()
        exp2 = Experiment(title='test exp2',
                          institution_name='monash',
                          created_by=self.user)
        exp2.save()

        group = Group(name="Test Manager Group")
        group.save()
        group.user_set.add(self.user)
        facility = Facility(name="Test Facility", manager_group=group)
        facility.save()
        instrument = Instrument(name="Test Instrument", facility=facility)
        instrument.save()

        dataset = Dataset(description='test dataset1')
        dataset.instrument = instrument
        dataset.save()
        dataset.experiments.set([exp, exp2])
        dataset.save()
        dataset_id = dataset.id

        del dataset
        dataset = Dataset.objects.get(pk=dataset_id)

        self.assertEqual(dataset.description, 'test dataset1')
        self.assertEqual(dataset.experiments.count(), 2)
        self.assertIn(exp, list(dataset.experiments.iterator()))
        self.assertIn(exp2, list(dataset.experiments.iterator()))
        self.assertEqual(instrument, dataset.instrument)
        target_id = Dataset.objects.first().id
        self.assertEqual(
            dataset.get_absolute_url(), '/dataset/%d' % target_id,
            dataset.get_absolute_url() + ' != /dataset/%d' % target_id)
Example No. 34
    def get_or_create_dataset(self, name, top=None):
        '''
        returns existing or created dataset given a name

        returns False if the dataset is not unique by name

        top is the directory
        '''
        ds = Dataset.objects.filter(description=name,
                                    experiments=self.experiment)
        if len(ds) == 1:
            return ds[0]
        elif len(ds) > 1:
            return False
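        # Unique new name: create the dataset, recording the source directory when one is given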
        ds = Dataset(description=name)
        if top is not None:
            ds.directory = top
            ds.save()
            self.tag_user(ds, top)
        else:
            ds.save()
        ds.experiments.add(self.experiment)
        return ds
Example No. 35
def _create_datafile():
    user = User.objects.create_user("testuser", "*****@*****.**", "pwd")
    user.save()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access)
    experiment.save()
    ObjectACL(
        content_object=experiment,
        pluginId="django_user",
        entityId=str(user.id),
        isOwner=True,
        canRead=True,
        canWrite=True,
        canDelete=True,
        aclOwnershipType=ObjectACL.OWNER_OWNED,
    ).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = DataFile(
        dataset=dataset, size=os.path.getsize(tempfile.file.name), filename="iiif_named_file", mimetype="image/tiff"
    )
    checksums = compute_checksums(open(tempfile.file.name, "r"))
    datafile.md5sum = checksums["md5sum"]
    datafile.sha512sum = checksums["sha512sum"]
    datafile.save()
    datafile.file_object = tempfile
    return datafile
Example No. 36
 def _prepare_project(self):
     project = Project(experiment=self.experiment)
     project.save()
     s1 = Sample(experiment=self.experiment, name="S1", description="s1 desc")
     s1.save()
     s2 = Sample(experiment=self.experiment, name="S2", description="s2 desc")
     s2.save()
     dw1 = DatasetWrapper(sample=s1, name="dw1", description="dw1 desc")
     dw2 = DatasetWrapper(sample=s1, name="dw2", description="dw2 desc")
     dw3 = DatasetWrapper(sample=s2, name="dw3", description="dw3 desc")
     ds1 = Dataset(experiment=self.experiment, description=dw1.description)
     ds2 = Dataset(experiment=self.experiment, description=dw2.description)
     ds3 = Dataset(experiment=self.experiment, description=dw3.description)
     ds1.save()
     ds2.save()
     ds3.save()
     dw1.dataset = ds1
     dw1.save()
     dw2.dataset = ds2
     dw2.save()
     dw3.dataset = ds3
     dw3.save()
     return project
Example No. 37
class DownloadTestCase(TestCase):

    def setUp(self):
        # create a test user
        self.user = User.objects.create_user(username='******',
                                             email='',
                                             password='******')

        Location.force_initialize()

        # create a public experiment
        self.experiment1 = Experiment(title='Experiment 1',
                                      created_by=self.user,
                                      public_access=Experiment.PUBLIC_ACCESS_FULL)
        self.experiment1.save()

        # create a non-public experiment
        self.experiment2 = Experiment(title='Experiment 2',
                                      created_by=self.user,
                                      public_access=Experiment.PUBLIC_ACCESS_NONE)
        self.experiment2.save()

        # dataset1 belongs to experiment1
        self.dataset1 = Dataset()
        self.dataset1.save()
        self.dataset1.experiments.add(self.experiment1)
        self.dataset1.save()

        # dataset2 belongs to experiment2
        self.dataset2 = Dataset()
        self.dataset2.save()
        self.dataset2.experiments.add(self.experiment2)
        self.dataset2.save()

        # absolute path first
        filename1 = 'testfile.txt'
        filename2 = 'testfile.tiff'
        self.dest1 = abspath(join(settings.FILE_STORE_PATH, '%s/%s/'
                                  % (self.experiment1.id,
                                  self.dataset1.id)))
        self.dest2 = abspath(join(settings.FILE_STORE_PATH,
                                '%s/%s/'
                                  % (self.experiment2.id,
                                  self.dataset2.id)))
        if not exists(self.dest1):
            makedirs(self.dest1)
        if not exists(self.dest2):
            makedirs(self.dest2)

        testfile1 = abspath(join(self.dest1, filename1))
        f = open(testfile1, 'w')
        f.write("Hello World!\n")
        f.close()

        testfile2 = abspath(join(self.dest2, filename2))
        _generate_test_image(testfile2)

        self.datafile1 = self._build_datafile(
            testfile1, filename1, self.dataset1,
            '%d/%d/%s' % (self.experiment1.id, self.dataset1.id, filename1))

        self.datafile2 = self._build_datafile(
            testfile2, filename2, self.dataset2,
            '%d/%d/%s' % (self.experiment2.id, self.dataset2.id, filename2))

    def _build_datafile(self, testfile, filename, dataset, url, 
                        protocol='', checksum=None, size=None, mimetype=''):
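        # Register a Dataset_File and a Replica for the given URL, attempt verification, and return the refreshed record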
        filesize, sha512sum = get_size_and_sha512sum(testfile)
        datafile = Dataset_File(dataset=dataset, filename=filename,
                                mimetype=mimetype,
                                size=str(size if size is not None else filesize),
                                sha512sum=(checksum if checksum else sha512sum))
        datafile.save()
        if urlparse.urlparse(url).scheme == '':
            location = Location.get_location('local')
        else:
            location = Location.get_location_for_url(url)
            if not location:
                location = Location.load_location({
                    'name': filename, 'url': urlparse.urljoin(url, '.'), 
                    'type': 'external', 
                    'priority': 10, 'transfer_provider': 'local'})
        replica = Replica(datafile=datafile, protocol=protocol, url=url,
                          location=location)
        replica.verify()
        replica.save()
        return Dataset_File.objects.get(pk=datafile.pk)

    def tearDown(self):
        self.user.delete()
        self.experiment1.delete()
        self.experiment2.delete()
        rmtree(self.dest1)
        rmtree(self.dest2)

    def testView(self):
        client = Client()

        # check view of file1
        response = client.get('/datafile/view/%i/' % self.datafile1.id)

        self.assertEqual(response['Content-Disposition'],
                         'inline; filename="%s"'
                         % self.datafile1.filename)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, 'Hello World!\n')

        # check view of file2
        response = client.get('/datafile/view/%i/' % self.datafile2.id)
        # Should be forbidden
        self.assertEqual(response.status_code, 403)

        self.experiment2.public_access=Experiment.PUBLIC_ACCESS_FULL
        self.experiment2.save()
        # check view of file2 again
        response = client.get('/datafile/view/%i/' % self.datafile2.id)
        self.assertEqual(response.status_code, 200)

        # The following behaviour relies on ImageMagick
        if IMAGEMAGICK_AVAILABLE:
            # file2 should have a ".png" filename
            self.assertEqual(response['Content-Disposition'],
                             'inline; filename="%s"'
                             % (self.datafile2.filename+'.png'))
            # file2 should be a PNG
            self.assertEqual(response['Content-Type'], 'image/png')
            png_signature = "\x89PNG\r\n\x1a\n"
            self.assertEqual(response.content[0:8], png_signature)
        else:
            # file2 should have a ".tiff" filename
            self.assertEqual(response['Content-Disposition'],
                             'inline; filename="%s"'
                             % (self.datafile2.filename))
            # file2 should be a TIFF
            self.assertEqual(response['Content-Type'], 'image/tiff')
            tiff_signature = "II\x2a\x00"
            self.assertEqual(response.content[0:4], tiff_signature)

    def _check_tar_file(self, content, rootdir, datafiles,
                        simpleNames=False, noTxt=False):
        with NamedTemporaryFile('w') as tempfile:
            tempfile.write(content)
            tempfile.flush()
            if getsize(tempfile.name) > 0:
                expect(is_tarfile(tempfile.name)).to_be_truthy()
                try:
                    tf = TarFile(tempfile.name, 'r')
                    self._check_names(datafiles, tf.getnames(), 
                                      rootdir, simpleNames, noTxt)
                finally:
                    tf.close()
            else:
                self._check_names(datafiles, [], 
                                  rootdir, simpleNames, noTxt)

    def _check_zip_file(self, content, rootdir, datafiles, 
                        simpleNames=False, noTxt=False):
        with NamedTemporaryFile('w') as tempfile:
            tempfile.write(content)
            tempfile.flush()
            # It should be a zip file
            expect(is_zipfile(tempfile.name)).to_be_truthy()
            try:
                zf = ZipFile(tempfile.name, 'r')
                self._check_names(datafiles, zf.namelist(), 
                                  rootdir, simpleNames, noTxt)
            finally:
                zf.close()

    def _check_names(self, datafiles, names, rootdir, simpleNames, noTxt):
        # SimpleNames says if we expect basenames or pathnames
        # NoTxt says if we expect '.txt' files to be filtered out
        if not noTxt:
            expect(len(names)).to_equal(len(datafiles))
        for df in datafiles:
            if simpleNames:
                filename = df.filename
            else:
                filename = join(rootdir, str(df.dataset.id), 
                                df.filename)
            expect(filename in names).to_be(
                not (noTxt and filename.endswith('.txt')))
        

    def testDownload(self):
        client = Client()

        # check download for experiment1
        response = client.get('/download/experiment/%i/zip/' % \
                                  self.experiment1.id)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.zip"'
                         % self.experiment1.id)
        self.assertEqual(response.status_code, 200)
        self._check_zip_file(
            response.content, str(self.experiment1.id),
            reduce(lambda x, y: x + y,
                   [ds.dataset_file_set.all() \
                        for ds in self.experiment1.datasets.all()]))
                   
        # check download for experiment1 as tar
        response = client.get('/download/experiment/%i/tar/' % \
                                  self.experiment1.id)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.tar"'
                         % self.experiment1.id)
        self.assertEqual(response.status_code, 200)
        self._check_tar_file(
            response.content, str(self.experiment1.id),
            reduce(lambda x, y: x + y,
                   [ds.dataset_file_set.all() \
                        for ds in self.experiment1.datasets.all()]))
                   
        # check download of file1
        response = client.get('/download/datafile/%i/' % self.datafile1.id)

        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="%s"'
                         % self.datafile1.filename)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, 'Hello World!\n')

        # requesting file2 should be forbidden...
        response = client.get('/download/datafile/%i/' % self.datafile2.id)
        self.assertEqual(response.status_code, 403)

        # check dataset1 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [self.dataset1.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 200)
        self._check_zip_file(response.content, 'datasets',
                             self.dataset1.dataset_file_set.all())

        # check dataset1 download as tar
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [self.dataset1.id],
                                'datafile': [],
                                'comptype': 'tar'})
        self.assertEqual(response.status_code, 200)
        self._check_tar_file(response.content, 'datasets',
                             self.dataset1.dataset_file_set.all())

        # check dataset2 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [self.dataset2.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 403)

        # check datafile1 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [],
                                'datafile': [self.datafile1.id]})
        self.assertEqual(response.status_code, 200)
        self._check_zip_file(response.content, 'datasets', [self.datafile1])

        # check datafile2 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [],
                                'datafile': [self.datafile2.id]})
        self.assertEqual(response.status_code, 403)

        # Check datafile2 download with second experiment to "metadata only"
        self.experiment2.public_access=Experiment.PUBLIC_ACCESS_METADATA
        self.experiment2.save()
        response = client.get('/download/datafile/%i/' % self.datafile2.id)
        # Metadata-only means "no file access"!
        self.assertEqual(response.status_code, 403)

        # Check datafile2 download with second experiment to public
        self.experiment2.public_access=Experiment.PUBLIC_ACCESS_FULL
        self.experiment2.save()
        response = client.get('/download/datafile/%i/' % self.datafile2.id)
        self.assertEqual(response.status_code, 200)
        # This should be a TIFF (which often starts with "II\x2a\x00")
        self.assertEqual(response['Content-Type'], 'image/tiff')
        self.assertEqual(response.content[0:4], "II\x2a\x00")

        # check experiment zip download with alternative organization
        response = client.get('/download/experiment/%i/zip/test/' % \
                                  self.experiment1.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.zip"'
                         % self.experiment1.id)
        self._check_zip_file(
            response.content, str(self.experiment1.id),
            reduce(lambda x, y: x + y,
                   [ds.dataset_file_set.all() \
                        for ds in self.experiment1.datasets.all()]),
            simpleNames=True)

        # check experiment tar download with alternative organization
        response = client.get('/download/experiment/%i/tar/test/' % \
                                  self.experiment1.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.tar"'
                         % self.experiment1.id)
        self._check_tar_file(
            response.content, str(self.experiment1.id),
            reduce(lambda x, y: x + y,
                   [ds.dataset_file_set.all() \
                        for ds in self.experiment1.datasets.all()]),
            simpleNames=True)

        # check experiment1 download with '.txt' filtered out (none left)
        response = client.get('/download/experiment/%i/tar/test2/' % \
                                  self.experiment1.id)
        self.assertEqual(response.status_code, 400)

        # check experiment2 download with '.txt' filtered out
        response = client.get('/download/experiment/%i/tar/test2/' % \
                                  self.experiment2.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.tar"'
                         % self.experiment2.id)
        self._check_tar_file(
            response.content, str(self.experiment2.id),
            reduce(lambda x, y: x + y,
                   [ds.dataset_file_set.all() \
                        for ds in self.experiment2.datasets.all()]),
            simpleNames=True, noTxt=True)

        # check dataset1 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [self.dataset1.id],
                                'datafile': [],
                                'comptype': 'zip',
                                'organization': 'test'})
        self.assertEqual(response.status_code, 200)
        self._check_zip_file(response.content, 'datasets',
                             self.dataset1.dataset_file_set.all(),
                             simpleNames=True)


    def testDatasetFile(self):

        # check registered text file for physical file meta information
        df = Dataset_File.objects.get(pk=self.datafile1.id)

        try:
            from magic import Magic
            self.assertEqual(df.mimetype, 'text/plain; charset=us-ascii')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(df.size, str(13))
        self.assertEqual(df.md5sum, '8ddd8be4b179a529afa5f2ffae4b9858')

        # Now check we can calculate checksums and infer the mime type
        # for a JPG file.
        filename = abspath(join(dirname(__file__),
                                '../static/images/ands-logo-hi-res.jpg'))

        dataset = Dataset.objects.get(pk=self.dataset1.id)

        pdf1 = self._build_datafile(filename, basename(filename), dataset, 
                                    'file://%s' % filename, protocol='file')
        self.assertEqual(pdf1.get_preferred_replica().verify(), True)
        pdf1 = Dataset_File.objects.get(pk=pdf1.pk)        

        try:
            from magic import Magic
            self.assertEqual(pdf1.mimetype, 'image/jpeg')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf1.size, str(14232))
        self.assertEqual(pdf1.md5sum, 'c450d5126ffe3d14643815204daf1bfb')

        # Now check that we can override the physical file meta information
        # We are setting size/checksums that don't match the actual file, so
        # the replica will fail verification below.
        pdf2 = self._build_datafile(
            filename, filename, dataset,
            'file://%s' % filename, protocol='file', 
            mimetype='application/vnd.openxmlformats-officedocument.presentationml.presentation',
            size=0,
            # Empty string always has the same hash
            checksum='cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
        self.assertEqual(pdf2.size, str(0))
        self.assertEqual(pdf2.md5sum, '')
        self.assertEqual(pdf2.get_preferred_replica().verify(), False)
        pdf2 = Dataset_File.objects.get(pk=pdf2.pk)
        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/vnd.openxmlformats-officedocument.presentationml.presentation')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf2.size, str(0))
        self.assertEqual(pdf2.md5sum, '')

        pdf2.mimetype = ''
        pdf2.save()
        pdf2.get_preferred_replica().save()
        pdf2 = Dataset_File.objects.get(pk=pdf2.pk)

        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/pdf')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
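
The mimetype assertions above rely on the optional python-magic bindings, which the portal uses to infer a MIME type when none is declared. A minimal sketch of that kind of fallback, assuming python-magic is installed; guess_mimetype and its logic are illustrative, not MyTardis's actual code (the charset suffix seen in the text/plain assertion would need an extra libmagic flag):

# A minimal sketch of MIME detection with python-magic; guess_mimetype and the
# fallback logic are assumptions for illustration, not MyTardis's actual code.
from magic import Magic

def guess_mimetype(path, declared_mimetype=''):
    # Prefer an explicitly declared mimetype (as in the pdf2 override above);
    # otherwise ask libmagic for one, e.g. 'image/jpeg' or 'text/plain'.
    if declared_mimetype:
        return declared_mimetype
    return Magic(mime=True).from_file(path)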
Exemplo n.º 38
0
class MyTardisHSMUtilsTestCase(TestCase):
    """Test cases for the MyTardisHSM utils module"""
    def setUp(self):
        """Setup test fixtures if needed."""
        self.user = User.objects.create_user("doctor", '', "pwd")

        self.exp = Experiment(title="Wonderful",
                              institution_name="Monash University",
                              created_by=self.user)
        self.exp.save()

        group = Group(name="Group1")
        group.save()

        facility = Facility(name="Test Facility", manager_group=group)
        facility.save()

        inst = Instrument(name="Test Instrument1", facility=facility)
        inst.save()

        self.dataset = Dataset(description="Dataset1", instrument=inst)
        self.dataset.save()

        storage_classes = getattr(settings, "HSM_STORAGE_CLASSES",
                                  DEFAULT_HSM_CLASSES)
        self.sbox1 = StorageBox(name="SBOX1",
                                django_storage_class=storage_classes[0],
                                status='online',
                                max_size=256)
        self.sbox1.save()
        sbox1_attr = StorageBoxAttribute(storage_box=self.sbox1,
                                         key='type',
                                         value=StorageBox.DISK)
        sbox1_attr.save()
        sbox1_loc_opt = StorageBoxOption(storage_box=self.sbox1,
                                         key="location",
                                         value="/dummy/path")
        sbox1_loc_opt.save()

        self.sbox2 = StorageBox(
            name="SBOX2",
            django_storage_class="any.non.disk.StorageSystem",
            status='offline',
            max_size=256)
        self.sbox2.save()
        sbox2_attr = StorageBoxAttribute(storage_box=self.sbox2,
                                         key='type',
                                         value=StorageBox.TAPE)
        sbox2_attr.save()

        self.df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        self.df1.save()
        self.dfo1 = DataFileObject(datafile=self.df1,
                                   storage_box=self.sbox1,
                                   uri="stream/test.jpg")
        self.dfo1.save()

    def tearDown(self):
        """Remove stuff"""

    def test_000_lock_datafile(self):
        """We should be able to lock a datafile to prevent concurrent access"""
        with DatafileLock(self.df1, "dummy_oid1") as lock1:
            if lock1:
                with DatafileLock(self.df1, "dummy_oid2") as lock2:
                    self.assertTrue(lock1)
                    self.assertFalse(lock2)

    def test_001_datafile_lock_expiry(self):
        """A datafile lock should not release until the expiry has been
        reached"""
        with DatafileLock(self.df1, "dummy_oid1", expires=2) as lock1:
            self.assertTrue(lock1)

        # If we retry lock right away, lock acquisition should fail because
        # expiry hasn't been reached
        with DatafileLock(self.df1, "dummy_oid1_1") as lock1:
            self.assertFalse(lock1)

        # wait 2s for lock to release
        time.sleep(2)

        # If we retry acquiring the lock now it should succeed
        with DatafileLock(self.df1, "dummy_oid1_2") as lock1:
            self.assertTrue(lock1)
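
The two lock tests above treat DatafileLock as a non-reentrant lock keyed on the datafile that is released only by expiry. A minimal sketch of one way to get those semantics with Django's cache, assuming a backend with an atomic add(); this is illustrative, not the app's actual implementation:

# Hypothetical cache-backed lock with the semantics the tests above expect:
# one holder per datafile, and release happens only when the timeout expires.
from django.core.cache import cache

class CacheDatafileLock(object):
    def __init__(self, datafile, oid, expires=30):
        self.key = "hsm_datafile_lock_%d" % datafile.id
        self.oid = oid
        self.expires = expires

    def __enter__(self):
        # cache.add() only sets the key if it is absent, so a concurrent
        # caller (dummy_oid2 above) gets False while the lock is held.
        return cache.add(self.key, self.oid, self.expires)

    def __exit__(self, *exc_info):
        # Do not delete the key here: the lock is released by expiry,
        # which is what test_001_datafile_lock_expiry checks.
        return False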
Exemplo n.º 39
0
class DownloadTestCase(TestCase):

    def setUp(self):
        # create a test user
        self.user = User.objects.create_user(username='******',
                                             email='',
                                             password='******')

        # create a public experiment
        self.experiment1 = Experiment(title='Experiment 1',
                                      created_by=self.user,
                                      public=True)
        self.experiment1.save()

        # create a non-public experiment
        self.experiment2 = Experiment(title='Experiment 2',
                                      created_by=self.user,
                                      public=False)
        self.experiment2.save()

        # dataset1 belongs to experiment1
        self.dataset1 = Dataset(experiment=self.experiment1)
        self.dataset1.save()

        # dataset2 belongs to experiment2
        self.dataset2 = Dataset(experiment=self.experiment2)
        self.dataset2.save()

        # absolute path first
        filename = 'testfile.txt'
        self.dest1 = abspath(join(settings.FILE_STORE_PATH, '%s/%s/'
                                  % (self.experiment1.id,
                                  self.dataset1.id)))
        self.dest2 = abspath(join(settings.FILE_STORE_PATH,
                                '%s/%s/'
                                  % (self.experiment2.id,
                                  self.dataset2.id)))
        if not exists(self.dest1):
            makedirs(self.dest1)
        if not exists(self.dest2):
            makedirs(self.dest2)

        testfile1 = abspath(join(self.dest1, filename))
        f = open(testfile1, 'w')
        f.write("Hello World!\n")
        f.close()

        testfile2 = abspath(join(self.dest2, filename))
        f = open(testfile2, 'w')
        f.write("Hello World!\n")
        f.close()

        self.dataset_file1 = Dataset_File(dataset=self.dataset1,
                                          filename=filename,
                                          protocol='tardis',
                                          url='tardis://%s' % filename)
        self.dataset_file1.save()

        self.dataset_file2 = Dataset_File(dataset=self.dataset2,
                                          filename=basename(filename),
                                          protocol='tardis',
                                          url='tardis://%s' % filename)
        self.dataset_file2.save()

    def tearDown(self):
        self.user.delete()
        self.experiment1.delete()
        self.experiment2.delete()
        rmtree(self.dest1)
        rmtree(self.dest2)

    def testDownload(self):
        client = Client()

        # check download for experiment1
        response = client.get('/download/experiment/%i/zip/' % self.experiment1.id)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.zip"'
                         % self.experiment1.id)
        self.assertEqual(response.status_code, 200)

        # check download of file1
        response = client.get('/download/datafile/%i/' % self.dataset_file1.id)

        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="%s"'
                         % self.dataset_file2.filename)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, 'Hello World!\n')

        # requesting file2 should be forbidden...
        response = client.get('/download/datafile/%i/' % self.dataset_file2.id)
        self.assertEqual(response.status_code, 403)

        # check dataset1 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [self.dataset1.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 200)

        # check dataset2 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [self.dataset2.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 403)

        # check datafile1 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [],
                                'datafile': [self.dataset_file1.id]})
        self.assertEqual(response.status_code, 200)

        # check datafile2 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [],
                                'datafile': [self.dataset_file2.id]})
        self.assertEqual(response.status_code, 403)

    def testDatasetFile(self):

        # check registered text file for physical file meta information
        df = Dataset_File.objects.get(pk=self.dataset_file1.id)

        try:
            from magic import Magic
            self.assertEqual(df.mimetype, 'text/plain; charset=us-ascii')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(df.size, str(13))
        self.assertEqual(df.md5sum, '8ddd8be4b179a529afa5f2ffae4b9858')

        # now check a JPG file
        filename = join(abspath(dirname(__file__)),
                        '../static/images/ands-logo-hi-res.jpg')

        dataset = Dataset.objects.get(pk=self.dataset1.id)

        pdf1 = Dataset_File(dataset=dataset,
                            filename=basename(filename),
                            url='file://%s' % filename,
                            protocol='file')
        pdf1.save()
        try:
            from magic import Magic
            self.assertEqual(pdf1.mimetype, 'image/jpeg')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf1.size, str(14232))
        self.assertEqual(pdf1.md5sum, 'c450d5126ffe3d14643815204daf1bfb')

        # now check that we can override the physical file meta information
        pdf2 = Dataset_File(dataset=dataset,
                            filename=basename(filename),
                            url='file://%s' % filename,
                            protocol='file',
                            mimetype='application/vnd.openxmlformats-officedocument.presentationml.presentation',
                            size=str(0),
                            md5sum='md5sum')
        pdf2.save()
        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/vnd.openxmlformats-officedocument.presentationml.presentation')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf2.size, str(0))
        self.assertEqual(pdf2.md5sum, 'md5sum')

        pdf2.mimetype = ''
        pdf2.save()

        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/pdf')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
Exemplo n.º 40
0
class MyTardisHSMTFiltersTestCase(TestCase):
    """Testing MyTardis HSM App filters"""
    def setUp(self):
        """Setup test fixtures if needed."""
        self.user = User.objects.create_user("doctor", '', "pwd")

        self.exp = Experiment(title="Wonderful",
                              institution_name="Monash University",
                              created_by=self.user)
        self.exp.save()

        group = Group(name="Group1")
        group.save()

        facility = Facility(name="Test Facility", manager_group=group)
        facility.save()

        self.inst = Instrument(name="Test Instrument1", facility=facility)
        self.inst.save()

        self.dataset = Dataset(description="Dataset1", instrument=self.inst)
        self.dataset.save()

        self.dataset.experiments.add(self.exp)

        storage_classes = getattr(settings, "HSM_STORAGE_CLASSES",
                                  DEFAULT_HSM_CLASSES)
        self.sbox1 = StorageBox(name="SBOX1",
                                django_storage_class=storage_classes[0],
                                status='online',
                                max_size=256)
        self.sbox1.save()
        sbox1_attr = StorageBoxAttribute(storage_box=self.sbox1,
                                         key='type',
                                         value=StorageBox.DISK)
        sbox1_attr.save()
        sbox1_loc_opt = StorageBoxOption(storage_box=self.sbox1,
                                         key="location",
                                         value=tempfile.gettempdir())
        sbox1_loc_opt.save()

        self.sbox2 = StorageBox(
            name="SBOX2",
            django_storage_class="any.non.disk.StorageSystem",
            status='offline',
            max_size=256)
        self.sbox2.save()
        sbox2_attr = StorageBoxAttribute(storage_box=self.sbox2,
                                         key='type',
                                         value=StorageBox.TAPE)
        sbox2_attr.save()

    @mock.patch("os.stat")
    def test_001_create_df_status(self, mock_stat):
        """When a new datafile record is verified, metadata for it's
        online/offline status should be created and populated with the
        current online status"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())

        temp = tempfile.NamedTemporaryFile(dir=tempfile.gettempdir())
        temp_name = os.path.basename(temp.name)
        df2 = DataFile(dataset=self.dataset, filename=temp_name)
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri=temp_name)
        dfo2.save()
        df2.verify()

        param_name = ParameterName.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, name="online")

        paramset = DatafileParameterSet.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, datafile=df2)

        param = DatafileParameter.objects.get(parameterset=paramset,
                                              name=param_name)

        self.assertEquals(param.string_value, "True")
        temp.close()

    @mock.patch("os.stat")
    def test_002_no_duplicate_params(self, mock_stat):
        """Datafile should only ever have one online param"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())

        df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        df1.save()
        dfo1 = DataFileObject(datafile=df1,
                              storage_box=self.sbox1,
                              uri="stream/test.jpg",
                              verified=True)
        dfo1.save()
        df1.verify()

        param_name = ParameterName.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, name="online")

        paramset = DatafileParameterSet.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, datafile=df1)

        params = DatafileParameter.objects.filter(parameterset=paramset,
                                                  name=param_name)

        self.assertEquals(params.count(), 1)

        self.assertRaises(OnlineParamExistsError,
                          create_df_status, df1, HSM_DATAFILE_NAMESPACE, 500)

        params = DatafileParameter.objects.filter(parameterset=paramset,
                                                  name=param_name)

        self.assertEquals(params.count(), 1)

    @mock.patch("os.stat")
    def test_003_offline_dataset(self, mock_stat):
        """A dataset should be offline if any datafiles are offline"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=0,
                                       st_mtime=datetime.now())
        ds = Dataset(description="Dataset2", instrument=self.inst)
        ds.save()

        df2 = DataFile(dataset=ds, filename="test_file.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri=df2.filename)
        dfo2.save()
        df2.verify()

        self.assertFalse(dataset_online(ds))

    @mock.patch("os.stat")
    def test_004_offline_experiment(self, mock_stat):
        """An experiment should be offline if any datafiles are offline"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=0,
                                       st_mtime=datetime.now())
        ds = Dataset(description="Dataset2", instrument=self.inst)
        ds.save()
        ds.experiments.add(self.exp)

        df2 = DataFile(dataset=ds, filename="test_file.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri=df2.filename)
        dfo2.save()
        df2.verify()

        self.assertFalse(experiment_online(self.exp))
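
The filter tests above mock os.stat so that st_blocks=100 counts as online and st_blocks=0 as offline, matching the usual HSM convention that a file migrated to tape leaves only an unallocated stub on disk. A minimal sketch of such a block-based check; the function name and exact heuristic are assumptions, not the app's API:

import os

def file_is_online(path):
    # Hypothetical HSM check: a file counts as online when the blocks
    # actually allocated on disk cover its reported size (a tape-migrated
    # stub typically reports st_blocks == 0).
    stats = os.stat(path)
    if stats.st_size == 0:
        return True
    return stats.st_blocks * 512 >= stats.st_size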
class ParameterSetManagerTestCase(TestCase):

    def setUp(self):
        from django.contrib.auth.models import User
        from tempfile import mkdtemp

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        self.test_dir = mkdtemp()

        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()

        self.dataset = Dataset(description="dataset description...")
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 filename="testfile.txt",
                                 size="42", md5sum='bogus')
        self.datafile.save()

        self.dfo = DataFileObject(
            datafile=self.datafile,
            storage_box=self.datafile.get_default_storage_box(),
            uri="1/testfile.txt")
        self.dfo.save()

        self.schema = Schema(
            namespace="http://localhost/psmtest/df/",
            name="Parameter Set Manager", type=3)
        self.schema.save()

        self.parametername1 = ParameterName(
            schema=self.schema, name="parameter1",
            full_name="Parameter 1")
        self.parametername1.save()

        self.parametername2 = ParameterName(
            schema=self.schema, name="parameter2",
            full_name="Parameter 2",
            data_type=ParameterName.NUMERIC)
        self.parametername2.save()

        self.parametername3 = ParameterName(
            schema=self.schema, name="parameter3",
            full_name="Parameter 3",
            data_type=ParameterName.DATETIME)
        self.parametername3.save()

        self.datafileparameterset = DatafileParameterSet(
            schema=self.schema, datafile=self.datafile)
        self.datafileparameterset.save()

        self.datafileparameter1 = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername1, string_value="test1")
        self.datafileparameter1.save()

        self.datafileparameter2 = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername2, numerical_value=2)
        self.datafileparameter2.save()

        # Create a ParameterName and Parameter of type LINK to an experiment
        self.parametername_exp_link = ParameterName(
            schema=self.schema, name="exp_link",
            full_name="This parameter is a experiment LINK",
            data_type=ParameterName.LINK)
        self.parametername_exp_link.save()

        self.exp_link_param = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername_exp_link)
        exp_url = self.exp.get_absolute_url()  # /experiment/view/1/
        self.exp_link_param.set_value(exp_url)
        self.exp_link_param.save()

        # Create a ParameterName and Parameter of type LINK to a dataset
        self.parametername_dataset_link = ParameterName(
            schema=self.schema, name="dataset_link",
            full_name="This parameter is a dataset LINK",
            data_type=ParameterName.LINK)
        self.parametername_dataset_link.save()

        self.dataset_link_param = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername_dataset_link)
        dataset_url = self.dataset.get_absolute_url()  # /dataset/1/
        self.dataset_link_param.set_value(dataset_url)
        self.dataset_link_param.save()

        # Create a ParameterName type LINK to an unresolvable (non-URL)
        # free-text value
        self.parametername_unresolvable_link = ParameterName(
                schema=self.schema, name="freetext_link",
                full_name="This parameter is a non-URL LINK",
                data_type=ParameterName.LINK)
        self.parametername_unresolvable_link.save()

    def tearDown(self):
        self.exp.delete()
        self.user.delete()
        self.parametername1.delete()
        self.parametername2.delete()
        self.parametername3.delete()
        self.parametername_exp_link.delete()
        self.parametername_dataset_link.delete()
        self.parametername_unresolvable_link.delete()
        self.schema.delete()

    def test_existing_parameterset(self):

        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        self.assertTrue(psm.get_schema().namespace ==
                        "http://localhost/psmtest/df/")

        self.assertTrue(psm.get_param("parameter1").string_value == "test1")

        self.assertTrue(psm.get_param("parameter2", True) == 2)

    def test_new_parameterset(self):

        psm = ParameterSetManager(parentObject=self.datafile,
                                  schema="http://localhost/psmtest/df2/")

        self.assertTrue(psm.get_schema().namespace ==
                        "http://localhost/psmtest/df2/")

        psm.set_param("newparam1", "test3", "New Parameter 1")

        self.assertTrue(psm.get_param("newparam1").string_value ==
                        "test3")

        self.assertTrue(psm.get_param("newparam1").name.full_name ==
                        "New Parameter 1")

        psm.new_param("newparam1", "test4")

        self.assertTrue(len(psm.get_params("newparam1", True)) == 2)

        psm.set_param_list("newparam2", ("a", "b", "c", "d"))

        self.assertTrue(len(psm.get_params("newparam2")) == 4)

        psm.set_params_from_dict(
            {"newparam2": "test5", "newparam3": 3})

        self.assertTrue(psm.get_param("newparam2", True) == "test5")

        # the newparam3 gets created and '3' is set to a string_value
        # since one cannot assume that an initial numeric value
        # will imply continuing numeric type for this new param
        self.assertTrue(psm.get_param("newparam3").string_value == '3')

        psm.delete_params("newparam1")

        self.assertTrue(len(psm.get_params("newparam1", True)) == 0)

    def test_link_parameter_type(self):
        """
        Test that Parameter.link_gfk (GenericForeignKey) is correctly
        assigned after using Parameter.set_value(some_url) for a LINK Parameter.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        # Check link to experiment
        exp_url = self.exp.get_absolute_url()  # /experiment/view/1/
        self.assertTrue(psm.get_param("exp_link").string_value ==
                        exp_url)

        self.assertTrue(psm.get_param("exp_link").link_id ==
                        self.exp.id)

        exp_ct = ContentType.objects.get(model__iexact="experiment")
        self.assertTrue(psm.get_param("exp_link").link_ct == exp_ct)

        self.assertTrue(psm.get_param("exp_link").link_gfk == self.exp)

        # Check link to dataset
        dataset_url = self.dataset.get_absolute_url()  # /dataset/1/
        self.assertTrue(psm.get_param("dataset_link").string_value ==
                        dataset_url)

        self.assertTrue(psm.get_param("dataset_link").link_id ==
                        self.dataset.id)

        dataset_ct = ContentType.objects.get(model__iexact="dataset")
        self.assertTrue(psm.get_param("dataset_link").link_ct == dataset_ct)

        self.assertTrue(psm.get_param("dataset_link").link_gfk == self.dataset)

    def test_link_parameter_type_extra(self):
        # make a second ParameterSet for testing some variations
        # in URL values
        self.datafileparameterset2 = DatafileParameterSet(
            schema=self.schema, datafile=self.datafile)
        self.datafileparameterset2.save()

        psm = ParameterSetManager(parameterset=self.datafileparameterset2)

        self.dataset_link_param2 = DatafileParameter(
            parameterset=self.datafileparameterset2,
            name=self.parametername_dataset_link)
        # /dataset/1 - no trailing slash
        dataset_url = self.dataset.get_absolute_url()
        self.dataset_link_param2.set_value(dataset_url)
        self.dataset_link_param2.save()

        # Check link_id/link_ct/link_gfk to dataset
        self.assertTrue(psm.get_param("dataset_link").link_id ==
                        self.dataset.id)

        dataset_ct = ContentType.objects.get(model__iexact="dataset")
        self.assertTrue(psm.get_param("dataset_link").link_ct == dataset_ct)

        self.assertTrue(psm.get_param("dataset_link").link_gfk == self.dataset)

        # Test links of the form /api/v1/experiment/<experiment_id>/
        self.exp_link_param2 = DatafileParameter(
            parameterset=self.datafileparameterset2,
            name=self.parametername_exp_link)
        exp_url = '/api/v1/experiment/%s/' % self.exp.id
        self.exp_link_param2.set_value(exp_url)
        self.exp_link_param2.save()

        # Check link_id/link_ct/link_gfk to experiment
        self.assertTrue(psm.get_param("exp_link").link_id ==
                        self.exp.id)

        exp_ct = ContentType.objects.get(model__iexact="experiment")
        self.assertTrue(psm.get_param("exp_link").link_ct == exp_ct)

        self.assertTrue(psm.get_param("exp_link").link_gfk == self.exp)

    def test_unresolvable_link_parameter(self):
        """
        Test that LINK Parameters that can't be resolved to a model (including
        non-URL values) still work.
        """
        self.datafileparameterset3 = DatafileParameterSet(
                schema=self.schema, datafile=self.datafile)
        self.datafileparameterset3.save()

        psm = ParameterSetManager(parameterset=self.datafileparameterset3)

        # Create a Parameter of type LINK to an unresolvable (non-URL)
        # free-text value
        self.freetext_link_param = DatafileParameter(
                parameterset=self.datafileparameterset3,
                name=self.parametername_unresolvable_link)
        self.assertRaises(SuspiciousOperation,
                          lambda: self.freetext_link_param.set_value(
                              "FREETEXT_ID_123"))

    def test_tz_naive_date_handling(self):
        """
        Ensure that dates are handled in a timezone-aware way.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        psm.new_param("parameter3", str(datetime(1970, 01, 01, 10, 0, 0)))

        expect(psm.get_param("parameter3", True))\
            .to_equal(datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc))

    def test_tz_aware_date_handling(self):
        """
        Ensure that dates are handled in a timezone-aware way.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        psm.new_param("parameter3",
                      '1970-01-01T08:00:00+08:00')

        expect(psm.get_param("parameter3", True))\
            .to_equal(datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc))
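
The two date tests above expect a naive "1970-01-01 10:00:00" and an aware "1970-01-01T08:00:00+08:00" to both come back as midnight UTC, so naive values are evidently interpreted in a UTC+10 local zone before storage. A minimal sketch of that conversion with pytz; the timezone name is an assumption:

from datetime import datetime
import pytz

def to_utc(naive_dt, local_tz_name="Australia/Melbourne"):
    # Attach the (assumed) local timezone to a naive datetime, then
    # normalise it to UTC for storage.
    local_tz = pytz.timezone(local_tz_name)
    return local_tz.localize(naive_dt).astimezone(pytz.utc)

# to_utc(datetime(1970, 1, 1, 10, 0)) == datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc)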
Exemplo n.º 42
0
class Task(ParameterSetManager):
    schema_name = "http://localhost/task/generic"
    namespace = schema_name
    dataset = None
    DPS = None
    myHPC = None

    doNotCopyParams = ['TaskStatus',
                       'jobscript',  # many
                       'jobid',  # many
                       'jobidstatus',  # many
                       ]

    def __init__(self, dataset=None, dataset_id=None,
                 description="", experiment_id=None):
        """
        instantiate new task or existing task
        :param dataset: optional parameter to instantiate task from
          metadata, will be tested for completeness and copied into
          new task if complete
        :type dataset: Dataset
        """
        if dataset:
            self.dataset = dataset
        elif dataset_id:
            self.dataset = Dataset.objects.get(pk=dataset_id)
        else:
            if description == "":
                raise TypeError("No description given")
            if not experiment_id:
                raise TypeError("No experiment id given")
            self.dataset = Dataset()
            self.dataset.experiment_id = experiment_id
            self.dataset.description = description
            self.dataset.save()
        try:
            thisparameterset = DatasetParameterSet.objects.get(
                schema=self.get_schema(),
                dataset=self.dataset)
        except ObjectDoesNotExist:
            thisparameterset = DatasetParameterSet(
                schema=self.get_schema(),
                dataset=self.dataset)
            thisparameterset.save()
        super(Task, self).__init__(parameterset=thisparameterset)

    def get_status(self, value=False):
        try:
            return self.get_param("TaskStatus", value)
        except:
            return None

    def set_status(self, status):
        current_status = self.get_status(value=True)
        if current_status != status:
            self.set_param("TaskStatus", status, "Status of task")

    def get_files(self):
        return Dataset_File.objects.filter(dataset=self.dataset)

    def get_by_value(self, value):
        try:
            par = self.parameters.get(string_value=value)
        except ObjectDoesNotExist:
            try:
                par = self.parameters.get(numerical_value=value)
            except (ObjectDoesNotExist, ValueError):
                return None
        return par

    def parseResults(self):
        """
        stub, to be overridden by subclass if needed
        """
        pass

    def sendMail(self, toName, toAddress, returnURI,
                 type="JobComplete"):
        from django.core.mail import send_mail
        subject = "Your Task %s is complete" % self.dataset.description
        message = "Dear %s,\n" % toName
        message += "\nYour job %s is complete " % self.dataset.description
        message += "and the results are stored in myTardis.\n"
        message += "HOSTNAME\n"
        message += "\nBest regards,\nYour myTardis\n"
        send_mail(subject, message, '*****@*****.**',
                  [toAddress])

    @classmethod
    def getTaskList(cls, experiment_id, status="any"):
        """
        Get the list of all tasks of this class for an experiment, or only
        those with a specific status.
        :param experiment_id: id of the experiment being searched for tasks
        :type experiment_id: integer
        :param status: "any", or a TaskStatus value to filter by
        :type status: string
        :return: list of Task (or subclass) instances
        """
        DPSs = DatasetParameterSet.objects.filter(
            schema__namespace__startswith=cls.schema_name,
            dataset__experiment__pk=experiment_id)
        tasklist = [cls(dataset=dps.dataset) for dps in DPSs]
        if status == "any":
            return tasklist
        filteredlist = []
        for thistask in tasklist:
            try:
                if thistask.get_status(value=True) == status:
                    filteredlist.append(thistask)
            except ObjectDoesNotExist:
                continue
        return filteredlist

    @classmethod
    def clone(cls, oldInstance, newDescription, username):
        newInstance = cls(description=newDescription,
                          experiment_id=oldInstance.dataset.experiment.id)
        for param in oldInstance.parameters:
            if param.name.name not in cls.doNotCopyParams:
                if param.name.isNumeric():
                    value = param.numerical_value
                else:
                    value = param.string_value
                newInstance.new_param(param.name.name, value)
        import shutil
        import os
        for filename in oldInstance.get_params("uploaded_file", value=True):
            if filename[-8:] != ".jobfile":
                thisfile = Dataset_File.objects.get(
                    dataset=oldInstance.dataset,
                    filename=filename)
                shutil.copy(thisfile.get_absolute_filepath(),
                            get_full_staging_path(username))
                newfileurl = os.path.join(get_full_staging_path(username),
                                          filename)
                newDatafile = Dataset_File(
                    dataset=newInstance.dataset,
                    url=newfileurl,
                    protocol="staging",
                    mimetype=thisfile.mimetype,
                    )
                newDatafile.save()
        return newInstance
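
A brief usage sketch for the Task wrapper defined above, assuming an existing experiment; the variable names are illustrative:

# Hypothetical usage of the Task parameter-set wrapper defined above;
# 'experiment' is assumed to be an existing Experiment instance.
task = Task(description="phaser run 1", experiment_id=experiment.id)
task.set_status("queued")            # stored as the 'TaskStatus' parameter
print(task.get_status(value=True))   # -> "queued"

# All tasks of this class for the experiment, optionally filtered by status
queued_tasks = Task.getTaskList(experiment.id, status="queued")

# Copy a task's parameters (minus the job bookkeeping in doNotCopyParams)
# into a new dataset owned by the same experiment
rerun = Task.clone(task, "phaser run 1 (copy)", username="someuser")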
Exemplo n.º 43
0
class DownloadTestCase(TestCase):

    def setUp(self):
        # create a test user
        self.user = User.objects.create_user(username='******',
                                             email='',
                                             password='******')

        # create a public experiment
        self.experiment1 = Experiment(title='Experiment 1',
                                      created_by=self.user,
                                      public_access=Experiment.PUBLIC_ACCESS_FULL)
        self.experiment1.save()

        # create a non-public experiment
        self.experiment2 = Experiment(title='Experiment 2',
                                      created_by=self.user,
                                      public_access=Experiment.PUBLIC_ACCESS_NONE)
        self.experiment2.save()

        # dataset1 belongs to experiment1
        self.dataset1 = Dataset()
        self.dataset1.save()
        self.dataset1.experiments.add(self.experiment1)
        self.dataset1.save()


        # dataset2 belongs to experiment2
        self.dataset2 = Dataset()
        self.dataset2.save()
        self.dataset2.experiments.add(self.experiment2)
        self.dataset2.save()

        # absolute path first
        filename1 = 'testfile.txt'
        filename2 = 'testfile.tiff'
        self.dest1 = abspath(join(settings.FILE_STORE_PATH, '%s/%s/'
                                  % (self.experiment1.id,
                                  self.dataset1.id)))
        self.dest2 = abspath(join(settings.FILE_STORE_PATH,
                                '%s/%s/'
                                  % (self.experiment2.id,
                                  self.dataset2.id)))
        if not exists(self.dest1):
            makedirs(self.dest1)
        if not exists(self.dest2):
            makedirs(self.dest2)

        testfile1 = abspath(join(self.dest1, filename1))
        f = open(testfile1, 'w')
        f.write("Hello World!\n")
        f.close()

        testfile2 = abspath(join(self.dest2, filename2))
        if IMAGEMAGICK_AVAILABLE:
            with Image(filename='logo:') as img:
                img.format = 'tiff'
                img.save(filename=testfile2)
        else:
            # Apparently ImageMagick isn't installed...
            # Write a "fake" TIFF file
            f = open(testfile2, 'w')
            f.write("II\x2a\x00")
            f.close()


        size, sha512sum = get_size_and_sha512sum(testfile1)
        self.dataset_file1 = Dataset_File(dataset=self.dataset1,
                                          filename=filename1,
                                          protocol='',
                                          size=size,
                                          sha512sum=sha512sum,
                                          url='%d/%d/%s'
                                              % (self.experiment1.id,
                                                 self.dataset1.id,
                                                 filename1))
        self.dataset_file1.verify()
        self.dataset_file1.save()

        size, sha512sum = get_size_and_sha512sum(testfile2)
        self.dataset_file2 = Dataset_File(dataset=self.dataset2,
                                          filename=basename(filename2),
                                          protocol='',
                                          size=size,
                                          sha512sum=sha512sum,
                                          url='%d/%d/%s'
                                            % (self.experiment2.id,
                                               self.dataset2.id,
                                               filename2))
        self.dataset_file2.verify()
        self.dataset_file2.save()

    def tearDown(self):
        self.user.delete()
        self.experiment1.delete()
        self.experiment2.delete()
        rmtree(self.dest1)
        rmtree(self.dest2)

    def testView(self):
        client = Client()

        # check view of file1
        response = client.get('/datafile/view/%i/' % self.dataset_file1.id)

        self.assertEqual(response['Content-Disposition'],
                         'inline; filename="%s"'
                         % self.dataset_file1.filename)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, 'Hello World!\n')

        # check view of file2
        response = client.get('/datafile/view/%i/' % self.dataset_file2.id)
        # Should be forbidden
        self.assertEqual(response.status_code, 403)

        self.experiment2.public_access=Experiment.PUBLIC_ACCESS_FULL
        self.experiment2.save()
        # check view of file2 again
        response = client.get('/datafile/view/%i/' % self.dataset_file2.id)
        self.assertEqual(response.status_code, 200)

        # The following behaviour relies on ImageMagick
        if IMAGEMAGICK_AVAILABLE:
            # file2 should have a ".png" filename
            self.assertEqual(response['Content-Disposition'],
                             'inline; filename="%s"'
                             % (self.dataset_file2.filename+'.png'))
            # file2 should be a PNG
            self.assertEqual(response['Content-Type'], 'image/png')
            png_signature = "\x89PNG\r\n\x1a\n"
            self.assertEqual(response.content[0:8], png_signature)
        else:
            # file2 should have a ".tiff" filename
            self.assertEqual(response['Content-Disposition'],
                             'inline; filename="%s"'
                             % (self.dataset_file2.filename))
            # file2 should be a TIFF
            self.assertEqual(response['Content-Type'], 'image/tiff')
            tiff_signature = "II\x2a\x00"
            self.assertEqual(response.content[0:4], tiff_signature)

    def _check_tar_file(self, content, rootdir, datafiles):
        # It should be a zip file
        with NamedTemporaryFile('w') as tempfile:
            tempfile.write(content)
            tempfile.flush()
            with open(tempfile.name, 'r') as zipread:
                # It should be a zip file (all of which start with "PK")
                expect(zipread.read(2)).to_equal('PK')
            expect(is_zipfile(tempfile.name)).to_be_truthy()
            with ZipFile(tempfile.name, 'r') as zf:
                expect(len(zf.namelist())).to_equal(len(datafiles))
                for df in datafiles:
                    filename = join(rootdir, str(df.dataset.id), df.filename)
                    expect(filename in zf.namelist()).to_be_truthy()

    def _check_zip_file(self, content, rootdir, datafiles):
        # It should be a zip file
        with NamedTemporaryFile('w') as tempfile:
            tempfile.write(content)
            tempfile.flush()
            with open(tempfile.name, 'r') as zipread:
                # It should be a zip file (all of which start with "PK")
                expect(zipread.read(2)).to_equal('PK')
            expect(is_zipfile(tempfile.name)).to_be_truthy()
            zf = ZipFile(tempfile.name, 'r')
            expect(len(zf.namelist())).to_equal(len(datafiles))
            for df in datafiles:
                filename = join(rootdir, str(df.dataset.id), df.filename)
                expect(filename in zf.namelist()).to_be_truthy()
            zf.close()


    def testDownload(self):
        client = Client()

        # check download for experiment1
        response = client.get('/download/experiment/%i/zip/' % self.experiment1.id)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.zip"'
                         % self.experiment1.id)
        self.assertEqual(response.status_code, 200)
        self._check_zip_file(response.content, str(self.experiment1.id),
                             reduce(lambda x, y: x + y,
                                    [ds.dataset_file_set.all() \
                                     for ds in self.experiment1.datasets.all()]))

        # check download of file1
        response = client.get('/download/datafile/%i/' % self.dataset_file1.id)

        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="%s"'
                         % self.dataset_file1.filename)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, 'Hello World!\n')

        # requesting file2 should be forbidden...
        response = client.get('/download/datafile/%i/' % self.dataset_file2.id)
        self.assertEqual(response.status_code, 403)

        # check dataset1 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [self.dataset1.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 200)
        self._check_zip_file(response.content, 'datasets',
                             self.dataset1.dataset_file_set.all())

        # check dataset2 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [self.dataset2.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 403)

        # check datafile1 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [],
                                'datafile': [self.dataset_file1.id]})
        self.assertEqual(response.status_code, 200)
        self._check_zip_file(response.content, 'datasets', [self.dataset_file1])

        # check datafile2 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [],
                                'datafile': [self.dataset_file2.id]})
        self.assertEqual(response.status_code, 403)

        # Check datafile2 download with second experiment to "metadata only"
        self.experiment2.public_access=Experiment.PUBLIC_ACCESS_METADATA
        self.experiment2.save()
        response = client.get('/download/datafile/%i/' % self.dataset_file2.id)
        # Metadata-only means "no file access"!
        self.assertEqual(response.status_code, 403)

        # Check datafile2 download with second experiment to public
        self.experiment2.public_access=Experiment.PUBLIC_ACCESS_FULL
        self.experiment2.save()
        response = client.get('/download/datafile/%i/' % self.dataset_file2.id)
        self.assertEqual(response.status_code, 200)
        # This should be a TIFF (which often starts with "II\x2a\x00")
        self.assertEqual(response['Content-Type'], 'image/tiff')
        self.assertEqual(response.content[0:4], "II\x2a\x00")


    def testDatasetFile(self):

        # check registered text file for physical file meta information
        df = Dataset_File.objects.get(pk=self.dataset_file1.id)

        try:
            from magic import Magic
            self.assertEqual(df.mimetype, 'text/plain; charset=us-ascii')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(df.size, str(13))
        self.assertEqual(df.md5sum, '8ddd8be4b179a529afa5f2ffae4b9858')

        # now check a JPG file
        filename = abspath(join(dirname(__file__),
                                '../static/images/ands-logo-hi-res.jpg'))

        dataset = Dataset.objects.get(pk=self.dataset1.id)

        size, sha512sum = get_size_and_sha512sum(filename)
        pdf1 = Dataset_File(dataset=dataset,
                            filename=basename(filename),
                            size=str(size),
                            sha512sum=sha512sum,
                            url='file://%s' % filename,
                            protocol='file')
        pdf1.verify()
        pdf1.save()
        try:
            from magic import Magic
            self.assertEqual(pdf1.mimetype, 'image/jpeg')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf1.size, str(14232))
        self.assertEqual(pdf1.md5sum, 'c450d5126ffe3d14643815204daf1bfb')

        # now check that we can override the physical file meta information
        pdf2 = Dataset_File(dataset=dataset,
                            filename=basename(filename),
                            url='file://%s' % filename,
                            protocol='file',
                            mimetype='application/vnd.openxmlformats-officedocument.presentationml.presentation',
                            size=str(0),
                            # Empty string always has the same hash
                            sha512sum='cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
        pdf2.save()
        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/vnd.openxmlformats-officedocument.presentationml.presentation')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf2.size, str(0))
        self.assertEqual(pdf2.md5sum, '')

        pdf2.mimetype = ''
        pdf2.save()

        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/pdf')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
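
The setUp and testDatasetFile methods above call get_size_and_sha512sum() before registering each file. A minimal sketch of what such a helper might do, reading the file in chunks; the real implementation may differ:

import hashlib
import os

def get_size_and_sha512sum(path, chunk_size=65536):
    # Hypothetical re-implementation: return (size in bytes, hex SHA-512),
    # reading the file in chunks so large files are not loaded into memory.
    digest = hashlib.sha512()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return os.path.getsize(path), digest.hexdigest()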
Exemplo n.º 44
0
class SFTPTest(TestCase):
    def setUp(self):
        self.hostname = '127.0.0.1'
        self.username = '******'
        self.password = '******'
        email = ''
        self.user = User.objects.create_user(self.username, email,
                                             self.password)
        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()

        self.acl = ObjectACL(content_object=self.exp,
                             pluginId='django_user',
                             entityId=str(self.user.id),
                             isOwner=True,
                             canRead=True,
                             canWrite=True,
                             canDelete=True,
                             aclOwnershipType=ObjectACL.OWNER_OWNED)
        self.acl.save()

        self.dataset = Dataset(description='test dataset1')
        self.dataset.save()
        self.dataset.experiments.set([self.exp])
        self.dataset.save()

        def _build(dataset, filename, url):
            datafile_content = b"\n".join(
                [b'some data %d' % i for i in range(1000)])
            filesize = len(datafile_content)
            datafile = DataFile(dataset=dataset,
                                filename=filename,
                                size=filesize)
            datafile.save()
            dfo = DataFileObject(
                datafile=datafile,
                storage_box=datafile.get_default_storage_box(),
                uri=url)
            dfo.file_object = BytesIO(datafile_content)
            dfo.save()
            return datafile

        saved_setting = settings.REQUIRE_DATAFILE_CHECKSUMS
        try:
            settings.REQUIRE_DATAFILE_CHECKSUMS = False
            _build(self.dataset, 'file.txt', 'path/file.txt')
        finally:
            settings.REQUIRE_DATAFILE_CHECKSUMS = saved_setting

    def test_sftp(self):
        path_mapper = make_mapper(settings.DEFAULT_PATH_MAPPER, rootdir=None)

        server = flexmock(user=self.user)
        sftp_interface = MyTSFTPServerInterface(server=server)
        sftp_interface.session_started()

        exp_sftp_folders = sftp_interface.list_folder('/home/%s/experiments/' %
                                                      self.username)
        exp_sftp_folder_names = sorted(
            [sftp_folder.filename for sftp_folder in exp_sftp_folders])
        exp_folder_names = sorted(
            [path_mapper(exp) for exp in Experiment.safe.all(self.user)])
        self.assertEqual(exp_sftp_folder_names, exp_folder_names)

        ds_sftp_folders = sftp_interface.list_folder(
            '/home/%s/experiments/%s/' %
            (self.username, path_mapper(self.exp)))
        ds_sftp_folder_names = sorted(
            [sftp_folder.filename for sftp_folder in ds_sftp_folders])
        self.assertEqual(
            ds_sftp_folder_names,
            ['00_all_files', path_mapper(self.dataset)])

        sftp_files = sftp_interface.list_folder(
            '/home/%s/experiments/%s/%s/' %
            (self.username, path_mapper(self.exp), path_mapper(self.dataset)))
        sftp_filenames = sorted(
            [sftp_file.filename for sftp_file in sftp_files])
        self.assertEqual(sftp_filenames, ['file.txt'])

        server_interface = MyTServerInterface()
        self.assertEqual(
            server_interface.check_auth_password(self.username, self.password),
            AUTH_SUCCESSFUL)

        # should fail if user is inactive
        self.user.is_active = False
        self.user.save()

        self.assertEqual(
            server_interface.check_auth_password(self.username, self.password),
            AUTH_FAILED)

        self.user.is_active = True
        self.user.save()

    def test_sftp_key_connect(self):
        server_interface = MyTServerInterface()
        pub_key_str = (
            "AAAAB3NzaC1yc2EAAAADAQABAAAAgQCzvWE391K1pyBvePGpwDWMboSLIp"
            "5L5sMq+bXPPeJPSLOm9dnm8XexZOpeg14UpsYcmrkzVPeooaqz5PqtaHO46CdK11dS"
            "cs2a8PLnavGkJRf25/PDXxlHkiZXXbAfW+6t5aVJxSJ4Jt4FV0aDqMaaYxy4ikw6da"
            "BCkvug2OZQqQ==")

        priv_key_str = """-----BEGIN RSA PRIVATE KEY-----
MIICXgIBAAKBgQCzvWE391K1pyBvePGpwDWMboSLIp5L5sMq+bXPPeJPSLOm9dnm
8XexZOpeg14UpsYcmrkzVPeooaqz5PqtaHO46CdK11dScs2a8PLnavGkJRf25/PD
XxlHkiZXXbAfW+6t5aVJxSJ4Jt4FV0aDqMaaYxy4ikw6daBCkvug2OZQqQIDAQAB
AoGASpK9XlIQD+wqafWdFpf3368O8QdI9CbnPNJkG3sKhWidmR0R7l6rEX/UOah5
hUn4km+jfWe4ZU/GGmNbmkznDdOWspDKs7eeYl7saeRzuX2CdTVvrdU7qmD5+JLk
mXlWWd6rgRIfrFYXYeDVd8p6/kPR4SJe7dTTHuEKKIt9njECQQDhMqjyoNxftpl4
+mwQu0ZDLCZ4afDCGcsf73W3oSmqLyf401vQ6KAp/PmfxqGXY0ewGMzUJn9LFOyP
WOGcDFglAkEAzFL/DI3SYmsvLMt6/vK4qwEwSiJU8byUBj3CL3eL0xjn895GXPzb
9CUMu0fz60Tn7UhbohynPLmQ2w6npbZ9NQJBAN+uujGFpl9LuFV6KCzWV4wRJoUk
dYfWpvQpnfuvkPsBq+pzxhdTeQM7y5bwbUE509MOTyXKt1WUiwQ3fKDLgiECQQCb
Z4zhSYT4ojlRQrqb6pSWS+Mkn5QoAJw9Wv+1BqHsvwa8rxSpaREKUpuqXgGhsdkM
2noHhO+V+jW4xx6vpWr5AkEAgHoSbQUR5uY8ib3N3mNowVi9NhvBN1FkwGStM9W8
QKHf8Ha+rOx3B7Dbljc+Xdpcn9VyRmDlSqzX9aCkr18mNg==
-----END RSA PRIVATE KEY-----"""
        private_key = RSAKey.from_private_key(file_obj=StringIO(priv_key_str))

        # Fail if public key not registered
        self.assertEqual(
            server_interface.check_auth_publickey(self.username, private_key),
            AUTH_FAILED)

        SFTPPublicKey.objects.create(user=self.user,
                                     name="TestKey",
                                     key_type="ssh-rsa",
                                     public_key=pub_key_str)

        # Succeed if public key is registered
        self.assertEqual(
            server_interface.check_auth_publickey(self.username, private_key),
            AUTH_SUCCESSFUL)

        # Should fail if user is inactive
        self.user.is_active = False
        self.user.save()

        self.assertEqual(
            server_interface.check_auth_publickey(self.username, private_key),
            AUTH_FAILED)
        self.user.is_active = True
        self.user.save()

    @patch('webpack_loader.loader.WebpackLoader.get_bundle')
    def test_sftp_dynamic_docs_experiment(self, mock_webpack_get_bundle):
        factory = RequestFactory()
        request = factory.get(
            '/sftp_access/?object_type=experiment&object_id=%s' % self.exp.id)
        request.user = self.user
        response = sftp_access(request)
        path_mapper = make_mapper(settings.DEFAULT_PATH_MAPPER, rootdir=None)
        self.assertIn(
            b"sftp://tardis_user1@testserver:2200"
            b"/home/tardis_user1/experiments/%s" %
            path_mapper(self.exp).encode(), response.content)
        self.assertNotEqual(mock_webpack_get_bundle.call_count, 0)

    @patch('webpack_loader.loader.WebpackLoader.get_bundle')
    def test_sftp_dynamic_docs_dataset(self, mock_webpack_get_bundle):
        factory = RequestFactory()
        request = factory.get(
            '/sftp_access/?object_type=dataset&object_id=%s' % self.dataset.id)
        request.user = self.user
        response = sftp_access(request)
        path_mapper = make_mapper(settings.DEFAULT_PATH_MAPPER, rootdir=None)
        self.assertIn(
            b"sftp://tardis_user1@testserver:2200"
            b"/home/tardis_user1/experiments/%s/%s" % (path_mapper(
                self.exp).encode(), path_mapper(self.dataset).encode()),
            response.content)
        self.assertNotEqual(mock_webpack_get_bundle.call_count, 0)

    def test_cybderduck_connection_window(self):
        factory = RequestFactory()
        request = factory.get('/sftp_access/cyberduck/connection.png')
        request.user = self.user
        response = cybderduck_connection_window(request)
        self.assertEqual(response.status_code, 200)
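
test_sftp_key_connect above registers an SFTPPublicKey row and expects check_auth_publickey to accept only a matching key for an active user. A minimal sketch of that comparison using paramiko's auth constants; the function shape and model fields are assumptions based on the test, not MyTardis's actual code:

# Hypothetical public-key check mirroring what the test exercises: compare
# the offered key's base64 blob against the user's registered keys and
# reject inactive accounts.
from paramiko import AUTH_FAILED, AUTH_SUCCESSFUL

def check_publickey_for_user(user, offered_key, stored_keys):
    if not user.is_active:
        return AUTH_FAILED
    offered_blob = offered_key.get_base64()
    for stored in stored_keys:  # e.g. SFTPPublicKey.objects.filter(user=user)
        if stored.key_type == offered_key.get_name() \
                and stored.public_key == offered_blob:
            return AUTH_SUCCESSFUL
    return AUTH_FAILED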
Exemplo n.º 45
0
class UploadTestCase(TestCase):

    def setUp(self):
        from os import path, mkdir
        from tempfile import mkdtemp

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        self.userProfile = UserProfile(user=self.user)

        self.test_dir = mkdtemp()

        self.exp = Experiment(title='test exp1',
                institution_name='monash', created_by=self.user)
        self.exp.save()

        acl = ExperimentACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            experiment=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ExperimentACL.OWNER_OWNED,
            )
        acl.save()

        self.dataset = \
            Dataset(description='dataset description...',
                           experiment=self.exp)
        self.dataset.save()

        self.experiment_path = path.join(settings.FILE_STORE_PATH,
                str(self.dataset.experiment.id))

        self.dataset_path = path.join(self.experiment_path,
                                      str(self.dataset.id))

        if not path.exists(self.experiment_path):
            mkdir(self.experiment_path)
        if not path.exists(self.dataset_path):
            mkdir(self.dataset_path)

        # write test file

        self.filename = 'testfile.txt'

        self.f1 = open(path.join(self.test_dir, self.filename), 'w')
        self.f1.write('Test file 1')
        self.f1.close()

        self.f1_size = path.getsize(path.join(self.test_dir,
                                    self.filename))

        self.f1 = open(path.join(self.test_dir, self.filename), 'r')

    def tearDown(self):
        from shutil import rmtree

        self.f1.close()
        rmtree(self.test_dir)
        rmtree(self.dataset_path)
        rmtree(self.experiment_path)
        self.exp.delete()

    def testFileUpload(self):
        from os import path

        c = Client()
        c.login(username='******', password='******')
        session_id = c.session.session_key

        response = c.post('/upload/' + str(self.dataset.id) + '/',
            {'Filedata': self.f1, 'session_id': session_id})

        test_files_db = \
            Dataset_File.objects.filter(dataset__id=self.dataset.id)

        self.assertTrue(path.exists(path.join(self.dataset_path,
                        self.filename)))
        self.assertTrue(self.dataset.id == 1)
        self.assertTrue(test_files_db[0].url == 'tardis://testfile.txt')

    def testUploadComplete(self):
        from django.http import QueryDict, HttpRequest
        from tardis.tardis_portal.views import upload_complete
        data = [('filesUploaded', '1'), ('speed', 'really fast!'),
                ('allBytesLoaded', '2'), ('errorCount', '0')]
        post = QueryDict('&'.join(['%s=%s' % (k, v) for (k, v) in
                         data]))
        request = HttpRequest()
        request.POST = post
        response = upload_complete(request)
        self.assertTrue('<p>Number: 1</p>' in response.content)
        self.assertTrue('<p>Errors: 0</p>' in response.content)
        self.assertTrue('<p>Bytes: 2</p>' in response.content)
        self.assertTrue('<p>Speed: really fast!</p>'
                        in response.content)
Example no. 46
class ViewTemplateContextsTest(TestCase):

    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """
        Location.force_initialize()
        self.location = Location.get_location('local')

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = UserProfile(user=self.user)
        self.userProfile.save()
        self.exp = Experiment(title='test exp1',
                              institution_name='monash', created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.dataset_file = Dataset_File(dataset=self.dataset,
                                         size=42, filename="foo",
                                         md5sum="junk")
        self.dataset_file.save()
        self.replica = Replica(datafile=self.dataset_file,
                               url="http://foo",
                               location=self.location,
                               verified=False)
        self.replica.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.dataset_file.delete()
        self.acl.delete()

    def testExperimentView(self):
        """
        test some template context parameters for an experiment view
        """
        from tardis.tardis_portal.views import view_experiment
        from tardis.tardis_portal.shortcuts import render_response_index
        from django.http import HttpRequest
        from django.template import Context
        import sys

        # Default behavior
        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.user = self.user
        request.groups = []
        context = {'organization': ['classic', 'test', 'test2'],
                   'default_organization': 'classic',
                   'default_format': 'zip',
                   'protocol': [['zip', '/download/experiment/1/zip/'],
                                ['tar', '/download/experiment/1/tar/']]}
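        # flexmock's should_call spies on render_response_index: the real view still
        # runs, but the template name and context it receives are checked against the
        # expectations above.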
        views_module.should_call('render_response_index'). \
            with_args(_AnyMatcher(), "tardis_portal/view_experiment.html",
                      _ContextMatcher(context))
        response = view_experiment(request, experiment_id=self.exp.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {'organization': ['classic', 'test', 'test2'],
                       'default_organization': 'classic',
                       'default_format': 'tar',
                       'protocol': [['tar', '/download/experiment/1/tar/']]}
            views_module.should_call('render_response_index'). \
                with_args(_AnyMatcher(), "tardis_portal/view_experiment.html",
                          _ContextMatcher(context))
            response = view_experiment(request, experiment_id=self.exp.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")


    def testDatasetView(self):
        """
        test some context parameters for a dataset view
        """
        from tardis.tardis_portal.views import view_dataset
        from tardis.tardis_portal.shortcuts import render_response_index
        from django.http import HttpRequest
        from django.template import Context
        import sys

        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.user = self.user
        request.groups = []
        context = {'default_organization': 'classic',
                   'default_format': 'zip'}
        views_module.should_call('render_response_index'). \
            with_args(_AnyMatcher(), "tardis_portal/view_dataset.html",
                      _ContextMatcher(context))
        response = view_dataset(request, dataset_id=self.dataset.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {'default_organization': 'classic',
                       'default_format': 'tar'}
            views_module.should_call('render_response_index'). \
                with_args(_AnyMatcher(), "tardis_portal/view_dataset.html",
                          _ContextMatcher(context))
            response = view_dataset(request, dataset_id=self.dataset.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")
Example no. 47
class ContextualViewTest(TestCase):

    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = UserProfile(user=self.user)
        self.userProfile.save()
        self.exp = Experiment(title='test exp1',
                              institution_name='monash', created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.dataset_file = Dataset_File(dataset=self.dataset,
                                         size=42, filename="foo",
                                         md5sum="junk")
        self.dataset_file.save()

        self.testschema = Schema(namespace="http://test.com/test/schema",
                                 name="Test View",
                                 type=Schema.DATAFILE,
                                 hidden=True)
        self.testschema.save()
        self.dfps = DatafileParameterSet(dataset_file=self.dataset_file,
                                         schema=self.testschema)
        self.dfps.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.dataset_file.delete()
        self.testschema.delete()
        self.dfps.delete()
        self.acl.delete()

    def testDetailsDisplay(self):
        """
        test display of view for an existing schema and no display for an undefined one.
        """
        from tardis.tardis_portal.views import display_datafile_details
        request = flexmock(user=self.user, groups=[("testgroup",flexmock())])
        with self.settings(DATAFILE_VIEWS=[("http://test.com/test/schema", "/test/url"),
                                           ("http://does.not.exist", "/false/url")]):
            response = display_datafile_details(request, dataset_file_id=self.dataset_file.id)
            self.assertEqual(response.status_code, 200)
            self.assertTrue("/ajax/parameters/" in response.content)
            self.assertTrue("/test/url" in response.content)
            self.assertFalse("/false/url" in response.content)
Example no. 48
class StageFilesTestCase(TestCase):

    def setUp(self):
        # Create test owner without enough details
        username, email, password = ('testuser',
                                     '*****@*****.**',
                                     'password')
        user = User.objects.create_user(username, email, password)
        profile = UserProfile(user=user, isDjangoAccount=True)
        profile.save()
        # Need UserAuthentication
        UserAuthentication(userProfile=profile,
                           username=username,
                           authenticationMethod='localdb').save()
        # Create staging dir
        from os import path, makedirs
        staging_dir = path.join(settings.STAGING_PATH, username)
        if not path.exists(staging_dir):
            makedirs(staging_dir)
        # Ensure that staging dir is set up properly
        expect(get_full_staging_path(username)).to_be_truthy()

        Location.force_initialize()

        # Create test experiment and make user the owner of it
        experiment = Experiment(title='Text Experiment',
                                institution_name='Test Uni',
                                created_by=user)
        experiment.save()
        acl = ObjectACL(
            pluginId=django_user,
            entityId=str(user.id),
            content_object=experiment,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        acl.save()

        self.dataset = \
            Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(experiment)
        self.dataset.save()

        self.username, self.password = (username, password)

    def _get_authenticated_client(self):
        client = Client()
        # Login as user
        login = client.login(username=self.username, password=self.password)
        self.assertTrue(login)
        # Return authenticated client
        return client


    def _get_staging_url(self):
        return reverse('tardis.tardis_portal.views.stage_files_to_dataset',
                       args=[str(self.dataset.id)])

    def testForbiddenWithoutLogin(self):
        client = Client()
        response = client.get(self._get_staging_url())
        # Expect a redirect to login
        expect(response.status_code).to_equal(302)
        login_url = reverse('tardis.tardis_portal.views.login')
        ensure(login_url in response['Location'], True,
               "Redirect URL was not to login.")

    def testPostOnlyMethodAllowed(self):
        client = self._get_authenticated_client()

        for method in (x.lower() for x in ['GET', 'HEAD', 'PUT', 'OPTIONS']):
            response = getattr(client, method)(self._get_staging_url())
            # Expect a 405 Method Not Allowed
            expect(response.status_code).to_equal(405)
            # Expect valid "Allow" header
            expect(response['Allow']).to_equal('POST')

        response = client.post(self._get_staging_url())
        # Expect something other than a 405
        self.assertFalse(response.status_code == 405)

    def testRequiresJSON(self):
        client = Client()

        # Login as user
        login = client.login(username=self.username, password=self.password)
        self.assertTrue(login)

        response = client.post(self._get_staging_url())
        # Expect 400 Bad Request because we didn't have a payload
        expect(response.status_code).to_equal(400)

        response = client.post(self._get_staging_url(),
                               data={'files': ['foo', 'bar']})
        # Expect 400 Bad Request because we didn't have a JSON payload
        expect(response.status_code).to_equal(400)

        response = client.post(self._get_staging_url(),
                               data=json.dumps({'files': ['foo', 'bar']}),
                               content_type='application/octet-stream')
        # Expect 400 Bad Request because we didn't have a JSON Content-Type
        expect(response.status_code).to_equal(400)

    def testStageFile(self):
        client = self._get_authenticated_client()

        staging_dir = get_full_staging_path(self.username)

        from os.path import basename
        from tempfile import NamedTemporaryFile
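        # Create a file inside the user's staging area, then ask the view to stage it
        # into the dataset by POSTing its absolute path as a JSON-encoded list.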
        with NamedTemporaryFile('w', dir=staging_dir) as f:
            # Write some content
            f.write('This is just some content')
            f.flush()

            data = [ f.name ]
            content_type = 'application/json; charset=utf-8'
            response = client.post(self._get_staging_url(),
                                   data=json.dumps(data),
                                   content_type=content_type)

            # Expect 201 Created
            expect(response.status_code).to_equal(201)
            # Expect to get the email address of
            # staging user back
            # Can't test for async file staging
            emails = json.loads(response.content)
            expect(len(emails)).to_equal(1)
Example no. 49
    def test_datafile(self):
        from tardis.tardis_portal.models import Experiment, Dataset

        def _build(dataset, filename, url, protocol):
            from tardis.tardis_portal.models import \
                Dataset_File, Replica, Location
            datafile = Dataset_File(dataset=dataset, filename=filename)
            datafile.save()
            replica = Replica(datafile=datafile, url=url, 
                              protocol=protocol,
                              location=Location.get_default_location())
            replica.save()
            return datafile

        exp = Experiment(title='test exp1',
                         institution_name='monash',
                         approved=True,
                         created_by=self.user,
                         public_access=Experiment.PUBLIC_ACCESS_NONE)
        exp.save()

        dataset = Dataset(description="dataset description...")
        dataset.save()
        dataset.experiments.add(exp)
        dataset.save()

        save1 = settings.REQUIRE_DATAFILE_SIZES
        save2 = settings.REQUIRE_DATAFILE_CHECKSUMS
        try:
            settings.REQUIRE_DATAFILE_SIZES = False
            settings.REQUIRE_DATAFILE_CHECKSUMS = False
            df_file = _build(dataset, 'file.txt', 'path/file.txt', '')
            self.assertEqual(df_file.filename, 'file.txt')
            self.assertEqual(df_file.get_preferred_replica().url, 
                             'path/file.txt')
            self.assertEqual(df_file.get_preferred_replica().protocol, '')
            self.assertEqual(df_file.dataset, dataset)
            self.assertEqual(df_file.size, '')
            self.assertEqual(df_file.get_download_url(), 
                             '/test/download/datafile/1/')
            self.assertTrue(df_file.is_local())
            
            df_file = _build(dataset, 'file1.txt', 'path/file1.txt', 'vbl')
            self.assertEqual(df_file.filename, 'file1.txt')
            self.assertEqual(df_file.get_preferred_replica().url,
                             'path/file1.txt')
            self.assertEqual(df_file.get_preferred_replica().protocol, 'vbl')
            self.assertEqual(df_file.dataset, dataset)
            self.assertEqual(df_file.size, '')
            self.assertEqual(df_file.get_download_url(),
                             '/test/vbl/download/datafile/2/')
            self.assertFalse(df_file.is_local())
            
            df_file = _build(dataset, 'f.txt',
                             'http://localhost:8080/filestore/f.txt', '')
            self.assertEqual(df_file.filename, 'f.txt')
            self.assertEqual(df_file.get_preferred_replica().url,
                             'http://localhost:8080/filestore/f.txt')
            self.assertEqual(df_file.get_preferred_replica().protocol, '')
            self.assertEqual(df_file.dataset, dataset)
            self.assertEqual(df_file.size, '')
            self.assertEqual(df_file.get_download_url(),
                             '/test/download/datafile/3/')
            self.assertFalse(df_file.is_local())
            # Now check the 'REQUIRE' config params
            with self.assertRaises(Exception):
                settings.REQUIRE_DATAFILE_SIZES = True
                settings.REQUIRE_DATAFILE_CHECKSUMS = False
                Dataset_File(dataset=dataset, filename='foo.txt', md5sum='bad')
            with self.assertRaises(Exception):
                settings.REQUIRE_DATAFILE_SIZES = False
                settings.REQUIRE_DATAFILE_CHECKSUMS = True
                Dataset_File(dataset=dataset, filename='foo.txt', size='1')
                
        finally:
            settings.REQUIRE_DATAFILE_SIZES = save1
            settings.REQUIRE_DATAFILE_CHECKSUMS = save2
Example no. 50
class SimpleSearchTest(MyTardisResourceTestCase):
    def setUp(self):
        super(SimpleSearchTest, self).setUp()
        self.out = StringIO()
        call_command('search_index',
                     stdout=self.out,
                     action='delete',
                     force=True)
        call_command('search_index',
                     stdout=self.out,
                     action='rebuild',
                     force=True)
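        # Reset the search index so that only the objects created below (plus the
        # base test fixtures) are indexed when the search queries run.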
        # add dataset and datafile to experiment
        self.dataset1 = Dataset(description='test_dataset')
        self.dataset1.save()
        self.dataset1.experiments.add(self.testexp)
        self.dataset1.save()
        settings.REQUIRE_DATAFILE_SIZES = False
        settings.REQUIRE_DATAFILE_CHECKSUMS = False
        self.datafile = DataFile(dataset=self.dataset1, filename='test.txt')
        self.datafile.save()

    def test_simple_search_authenticated_user(self):
        response = self.api_client.get(
            '/api/v1/search_simple-search/?query=test',
            authentication=self.get_credentials())
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['objects'][0]['hits']['experiments']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datasets']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datafiles']), 1)

    def test_simple_search_unauthenticated_user(self):
        self.testexp.public_access = 100
        self.testexp.save()
        response = self.api_client.get(
            '/api/v1/search_simple-search/?query=test')
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['objects'][0]['hits']['experiments']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datasets']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datafiles']), 1)

    def test_advance_search_unauthenticated_user(self):
        self.testexp.public_access = 100
        self.testexp.save()
        response = self.api_client.post(
            '/api/v1/search_advance-search/',
            data={
                "text": "test",
                "TypeTag": ["Dataset", "Experiment", "Datafile"]
            })
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['hits']['experiments']), 1)
        self.assertEqual(len(data['hits']['datasets']), 1)
        self.assertEqual(len(data['hits']['datafiles']), 1)

    def test_advance_search_authenticated_user(self):
        response = self.api_client.post(
            '/api/v1/search_advance-search/',
            data={
                "text": "test",
                "TypeTag": ["Dataset", "Experiment", "Datafile"]
            },
            authentication=self.get_credentials())
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['hits']['experiments']), 1)
        self.assertEqual(len(data['hits']['datasets']), 1)
        self.assertEqual(len(data['hits']['datafiles']), 1)
Example no. 51
class DownloadTestCase(TestCase):
    def setUp(self):
        # create a test user
        self.user = User.objects.create_user(username='******',
                                             email='',
                                             password='******')

        # create a public experiment
        self.experiment1 = Experiment(
            title='Experiment 1',
            created_by=self.user,
            public_access=Experiment.PUBLIC_ACCESS_FULL)
        self.experiment1.save()

        # create a non-public experiment
        self.experiment2 = Experiment(
            title='Experiment 2',
            created_by=self.user,
            public_access=Experiment.PUBLIC_ACCESS_NONE)
        self.experiment2.save()

        # dataset1 belongs to experiment1
        self.dataset1 = Dataset(description='dangerous;name')
        self.dataset1.save()
        self.dataset1.experiments.add(self.experiment1)
        self.dataset1.save()

        # dataset2 belongs to experiment2
        self.dataset2 = Dataset(description='terrible\nname')
        self.dataset2.save()
        self.dataset2.experiments.add(self.experiment2)
        self.dataset2.save()

        # absolute path first
        filename1 = 'testfile.txt'
        filename2 = 'testfile.tiff'
        self.dest1 = abspath(
            join(settings.FILE_STORE_PATH,
                 '%s/%s/' % (self.experiment1.id, self.dataset1.id)))
        self.dest2 = abspath(
            join(settings.FILE_STORE_PATH,
                 '%s/%s/' % (self.experiment2.id, self.dataset2.id)))
        if not exists(self.dest1):
            makedirs(self.dest1)
        if not exists(self.dest2):
            makedirs(self.dest2)

        testfile1 = abspath(join(self.dest1, filename1))
        f = open(testfile1, 'w')
        f.write("Hello World!\n")
        f.close()

        testfile2 = abspath(join(self.dest2, filename2))
        _generate_test_image(testfile2)

        self.datafile1 = self._build_datafile(testfile1, filename1,
                                              self.dataset1)

        self.datafile2 = self._build_datafile(testfile2, filename2,
                                              self.dataset2)

    def _build_datafile(self,
                        testfile,
                        filename,
                        dataset,
                        checksum=None,
                        size=None,
                        mimetype=''):
        filesize, sha512sum = get_size_and_sha512sum(testfile)
        datafile = DataFile(dataset=dataset,
                            filename=filename,
                            mimetype=mimetype,
                            size=size if size is not None else filesize,
                            sha512sum=(checksum if checksum else sha512sum))
        datafile.save()
        dfo = DataFileObject(datafile=datafile,
                             storage_box=datafile.get_default_storage_box())
        dfo.save()
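        # Assigning file_object below stores the source file's contents in the
        # DataFileObject's storage box, so the datafile can later be verified.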
        with open(testfile, 'rb') as sourcefile:
            dfo.file_object = sourcefile
        return DataFile.objects.get(pk=datafile.pk)

    def tearDown(self):
        self.user.delete()
        self.experiment1.delete()
        self.experiment2.delete()
        rmtree(self.dest1)
        rmtree(self.dest2)

    def testView(self):
        client = Client()

        # check view of file1
        response = client.get('/datafile/view/%i/' % self.datafile1.id)

        self.assertEqual(response['Content-Disposition'],
                         'inline; filename="%s"' % self.datafile1.filename)
        self.assertEqual(response.status_code, 200)
        response_content = ""
        for c in response.streaming_content:
            response_content += c
        self.assertEqual(response_content, 'Hello World!\n')

        # check view of file2
        response = client.get('/datafile/view/%i/' % self.datafile2.id)
        # Should be forbidden
        self.assertEqual(response.status_code, 403)

        self.experiment2.public_access = Experiment.PUBLIC_ACCESS_FULL
        self.experiment2.save()
        # check view of file2 again
        response = client.get('/datafile/view/%i/' % self.datafile2.id)
        self.assertEqual(response.status_code, 200)

        # The following behaviour relies on ImageMagick
        if IMAGEMAGICK_AVAILABLE:
            # file2 should have a ".png" filename
            self.assertEqual(
                response['Content-Disposition'],
                'inline; filename="%s"' % (self.datafile2.filename + '.png'))
            # file2 should be a PNG
            self.assertEqual(response['Content-Type'], 'image/png')
            png_signature = "\x89PNG\r\n\x1a\n"
            self.assertEqual(response.content[0:8], png_signature)
        else:
            # file2 should have a ".tiff" filename
            self.assertEqual(
                response['Content-Disposition'],
                'inline; filename="%s"' % (self.datafile2.filename))
            # file2 should be a TIFF
            self.assertEqual(response['Content-Type'], 'image/tiff')
            tiff_signature = "II\x2a\x00"
            self.assertEqual(response.content[0:4], tiff_signature)

    def _check_tar_file(self,
                        content,
                        rootdir,
                        datafiles,
                        simpleNames=False,
                        noTxt=False):
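        # Stream the response body into a temporary file, then check that it is a
        # valid tar archive whose member names correspond to the expected datafiles.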
        with NamedTemporaryFile('w') as tempfile:
            for c in content:
                tempfile.write(c)
            tempfile.flush()
            if getsize(tempfile.name) > 0:
                expect(is_tarfile(tempfile.name)).to_be_truthy()
                try:
                    tf = TarFile(tempfile.name, 'r')
                    self._check_names(datafiles, tf.getnames(), rootdir,
                                      simpleNames, noTxt)
                finally:
                    tf.close()
            else:
                self._check_names(datafiles, [], rootdir, simpleNames, noTxt)

    def _check_zip_file(self,
                        content,
                        rootdir,
                        datafiles,
                        simpleNames=False,
                        noTxt=False):
        with NamedTemporaryFile('w') as tempfile:
            for c in content:
                tempfile.write(c)
            tempfile.flush()
            # It should be a zip file
            expect(is_zipfile(tempfile.name)).to_be_truthy()
            try:
                zf = ZipFile(tempfile.name, 'r')
                self._check_names(datafiles, zf.namelist(), rootdir,
                                  simpleNames, noTxt)
            finally:
                zf.close()

    def _check_names(self, datafiles, names, rootdir, simpleNames, noTxt):
        # SimpleNames says if we expect basenames or pathnames
        # NoTxt says if we expect '.txt' files to be filtered out
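        # Note: only the entry count and the absence of newline/semicolon characters
        # are verified here; simpleNames and noTxt are accepted for interface parity.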
        for name in names:
            self.assertNotRegexpMatches(name, '\n|;')
        expect(len(names)).to_equal(len(datafiles))

    def testDownload(self):
        client = Client()

        # check download for experiment1 as tar
        response = client.get('/download/experiment/%i/tar/' %
                              self.experiment1.id)
        self.assertEqual(
            response['Content-Disposition'],
            'attachment; filename="%s-complete.tar"' %
            self.experiment1.title.replace(' ', '_'))
        self.assertEqual(response.status_code, 200)
        self._check_tar_file(
            response.streaming_content,
            str(self.experiment1.title.replace(' ', '_')),
            reduce(lambda x, y: x + y, [
                ds.datafile_set.all()
                for ds in self.experiment1.datasets.all()
            ]))

        # check download of file1
        response = client.get('/download/datafile/%i/' % self.datafile1.id)

        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="%s"' % self.datafile1.filename)
        self.assertEqual(response.status_code, 200)
        response_content = ""
        for c in response.streaming_content:
            response_content += c
        self.assertEqual(response_content, 'Hello World!\n')

        # requesting file2 should be forbidden...
        response = client.get('/download/datafile/%i/' % self.datafile2.id)
        self.assertEqual(response.status_code, 403)

        # check dataset1 download as tar
        response = client.post(
            '/download/datafiles/', {
                'expid': self.experiment1.id,
                'dataset': [self.dataset1.id],
                'datafile': [],
                'comptype': 'tar'
            })
        self.assertEqual(response.status_code, 200)
        self._check_tar_file(response.streaming_content,
                             'Experiment 1-selection',
                             self.dataset1.datafile_set.all())

        # check dataset2 download
        response = client.post(
            '/download/datafiles/', {
                'expid': self.experiment2.id,
                'dataset': [self.dataset2.id],
                'datafile': []
            })
        self.assertEqual(response.status_code, 403)

        # check datafile1 download via POST
        response = client.post(
            '/download/datafiles/', {
                'expid': self.experiment1.id,
                'dataset': [],
                'datafile': [self.datafile1.id]
            })
        self.assertEqual(response.status_code, 200)
        self._check_tar_file(response.streaming_content,
                             'Experiment 1-selection', [self.datafile1])

        # check datafile2 download via POST
        response = client.post(
            '/download/datafiles/', {
                'expid': self.experiment2.id,
                'dataset': [],
                'datafile': [self.datafile2.id]
            })
        self.assertEqual(response.status_code, 403)

        # Check datafile2 download with second experiment to "metadata only"
        self.experiment2.public_access = Experiment.PUBLIC_ACCESS_METADATA
        self.experiment2.save()
        response = client.get('/download/datafile/%i/' % self.datafile2.id)
        # Metadata-only means "no file access"!
        self.assertEqual(response.status_code, 403)

        # Check datafile2 download with second experiment to public
        self.experiment2.public_access = Experiment.PUBLIC_ACCESS_FULL
        self.experiment2.save()
        response = client.get('/download/datafile/%i/' % self.datafile2.id)
        self.assertEqual(response.status_code, 200)
        # This should be a TIFF (which often starts with "II\x2a\x00")
        self.assertEqual(response['Content-Type'], 'image/tiff')
        response_content = ""
        for c in response.streaming_content:
            response_content += c
        self.assertEqual(response_content[0:4], "II\x2a\x00")

        # check experiment tar download with alternative organization
        response = client.get('/download/experiment/%i/tar/' %
                              self.experiment1.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response['Content-Disposition'],
            'attachment; filename="%s-complete.tar"' %
            self.experiment1.title.replace(' ', '_'))
        self._check_tar_file(response.streaming_content,
                             str(self.experiment1.id),
                             reduce(lambda x, y: x + y, [
                                 ds.datafile_set.all()
                                 for ds in self.experiment1.datasets.all()
                             ]),
                             simpleNames=True)

        # check experiment1 download with '.txt' filtered out (none left)
        response = client.get('/download/experiment/%i/tar/' %
                              self.experiment1.id)
        self.assertEqual(response.status_code, 200)

        # check experiment2 download with '.txt' filtered out
        response = client.get('/download/experiment/%i/tar/' %
                              self.experiment2.id)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response['Content-Disposition'],
            'attachment; filename="%s-complete.tar"' %
            self.experiment2.title.replace(' ', '_'))
        self._check_tar_file(response.streaming_content,
                             str(self.experiment2.id),
                             reduce(lambda x, y: x + y, [
                                 ds.datafile_set.all()
                                 for ds in self.experiment2.datasets.all()
                             ]),
                             simpleNames=True,
                             noTxt=True)

    def testDatasetFile(self):
        # check registered text file for physical file meta information
        df = DataFile.objects.get(pk=self.datafile1.id)  # skipping test # noqa # pylint: disable=W0101

        try:
            from magic import Magic
            self.assertEqual(df.mimetype, 'text/plain; charset=us-ascii')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(df.size, 13)
        self.assertEqual(df.md5sum, '8ddd8be4b179a529afa5f2ffae4b9858')

        # Now check we can calculate checksums and infer the mime type
        # for a JPG file.
        filename = 'tardis/tardis_portal/tests/test_data/ands-logo-hi-res.jpg'

        dataset = Dataset.objects.get(pk=self.dataset1.id)

        pdf1 = self._build_datafile(filename, basename(filename), dataset)
        self.assertEqual(pdf1.file_objects.get().verify(), True)
        pdf1 = DataFile.objects.get(pk=pdf1.pk)

        try:
            from magic import Magic  # noqa
            self.assertEqual(pdf1.mimetype, 'image/jpeg')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf1.size, 14232)
        self.assertEqual(pdf1.md5sum, 'c450d5126ffe3d14643815204daf1bfb')

        # Now check that we can override the physical file meta information.
        # We are setting size/checksums that don't match the actual file, so
        # the resulting DataFileObject should fail verification.
        pdf2 = self._build_datafile(
            filename,
            filename,
            dataset,
            checksum=
            'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e',
            size=0,
            mimetype=
            'application/vnd.openxmlformats-officedocument.presentationml.presentation'
        )  # noqa
        self.assertEqual(pdf2.size, 0)
        self.assertEqual(pdf2.md5sum, '')
        self.assertEqual(pdf2.file_objects.get().verified, False)
        pdf2 = DataFile.objects.get(pk=pdf2.pk)
        try:
            from magic import Magic  # noqa
            self.assertEqual(
                pdf2.mimetype,
                'application/vnd.openxmlformats-officedocument.presentationml.presentation'
            )  # noqa
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf2.size, 0)
        self.assertEqual(pdf2.md5sum, '')

        pdf2.mimetype = ''
        pdf2.save()
        pdf2.file_objects.get().save()
        pdf2 = DataFile.objects.get(pk=pdf2.pk)

        try:
            from magic import Magic  # noqa
            self.assertEqual(pdf2.mimetype, 'application/pdf')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
Example no. 52
class DownloadTestCase(TestCase):

    def setUp(self):
        # create a test user
        self.user = User.objects.create_user(username='******',
                                             email='',
                                             password='******')

        # create a public experiment
        self.experiment1 = Experiment(title='Experiment 1',
                                      created_by=self.user,
                                      public=True)
        self.experiment1.save()

        # create a non-public experiment
        self.experiment2 = Experiment(title='Experiment 2',
                                      created_by=self.user,
                                      public=False)
        self.experiment2.save()

        # dataset1 belongs to experiment1
        self.dataset1 = Dataset(experiment=self.experiment1)
        self.dataset1.save()

        # dataset2 belongs to experiment2
        self.dataset2 = Dataset(experiment=self.experiment2)
        self.dataset2.save()

        # absolute path first
        filename = 'testfile.txt'
        self.dest1 = abspath(
            join(settings.FILE_STORE_PATH,
                 '%s/%s/' % (self.experiment1.id, self.dataset1.id)))
        self.dest2 = abspath(
            join(settings.FILE_STORE_PATH,
                 '%s/%s/' % (self.experiment2.id, self.dataset2.id)))
        if not exists(self.dest1):
            makedirs(self.dest1)
        if not exists(self.dest2):
            makedirs(self.dest2)

        testfile1 = abspath(join(self.dest1, filename))
        f = open(testfile1, 'w')
        f.write("Hello World!\n")
        f.close()

        testfile2 = abspath(join(self.dest2, filename))
        f = open(testfile2, 'w')
        f.write("Hello World!\n")
        f.close()

        self.dataset_file1 = Dataset_File(dataset=self.dataset1,
                                          filename=filename,
                                          protocol='tardis',
                                          url='tardis://%s' % filename)
        self.dataset_file1.save()

        self.dataset_file2 = Dataset_File(dataset=self.dataset2,
                                          filename=basename(filename),
                                          protocol='tardis',
                                          url='tardis://%s' % filename)
        self.dataset_file2.save()

    def tearDown(self):
        self.user.delete()
        self.experiment1.delete()
        self.experiment2.delete()
        rmtree(self.dest1)
        rmtree(self.dest2)

    def testDownload(self):
        client = Client()

        # check download for experiment1
        response = client.get('/download/experiment/%i/zip/' % self.experiment1.id)
        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="experiment%s-complete.zip"'
                         % self.experiment1.id)
        self.assertEqual(response.status_code, 200)

        # check download of file1
        response = client.get('/download/datafile/%i/' % self.dataset_file1.id)

        self.assertEqual(response['Content-Disposition'],
                         'attachment; filename="%s"'
                         % self.dataset_file2.filename)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, 'Hello World!\n')

        # requesting file2 should be forbidden...
        response = client.get('/download/datafile/%i/' % self.dataset_file2.id)
        self.assertEqual(response.status_code, 403)

        # check dataset1 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [self.dataset1.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 200)

        # check dataset2 download
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [self.dataset2.id],
                                'datafile': []})
        self.assertEqual(response.status_code, 403)

        # check datafile1 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment1.id,
                                'dataset': [],
                                'datafile': [self.dataset_file1.id]})
        self.assertEqual(response.status_code, 200)

        # check datafile2 download via POST
        response = client.post('/download/datafiles/',
                               {'expid': self.experiment2.id,
                                'dataset': [],
                                'datafile': [self.dataset_file2.id]})
        self.assertEqual(response.status_code, 403)

    def testDatasetFile(self):

        # check registered text file for physical file meta information
        df = Dataset_File.objects.get(pk=self.dataset_file1.id)

        try:
            from magic import Magic
            self.assertEqual(df.mimetype, 'text/plain; charset=us-ascii')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(df.size, str(13))
        self.assertEqual(df.md5sum, '8ddd8be4b179a529afa5f2ffae4b9858')

        # now check a pdf file
        filename = join(abspath(dirname(__file__)),
                        '../static/downloads/DatasetDepositionGuide.pdf')

        dataset = Dataset.objects.get(pk=self.dataset1.id)

        pdf1 = Dataset_File(dataset=dataset,
                            filename=basename(filename),
                            url='file://%s' % filename,
                            protocol='file')
        pdf1.save()
        try:
            from magic import Magic
            self.assertEqual(pdf1.mimetype, 'application/pdf')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf1.size, str(1008475))
        self.assertEqual(pdf1.md5sum, '9192b3d3e0056412b1d21d3e33562eba')

        # now check that we can override the physical file meta information
        pdf2 = Dataset_File(dataset=dataset,
                            filename=basename(filename),
                            url='file://%s' % filename,
                            protocol='file',
                            mimetype='application/vnd.openxmlformats-officedocument.presentationml.presentation',
                            size=str(0),
                            md5sum='md5sum')
        pdf2.save()
        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/vnd.openxmlformats-officedocument.presentationml.presentation')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
        self.assertEqual(pdf2.size, str(0))
        self.assertEqual(pdf2.md5sum, 'md5sum')

        pdf2.mimetype = ''
        pdf2.save()

        try:
            from magic import Magic
            self.assertEqual(pdf2.mimetype, 'application/pdf')
        except:
            # XXX Test disabled because lib magic can't be loaded
            pass
Example no. 53
class ViewsTestCase(TestCase):

    def setUp(self):
        from django.contrib.auth.models import User
        from tardis.tardis_portal.models import Experiment
        from tardis.tardis_portal.models import ExperimentACL
        from tardis.tardis_portal.models import Dataset
        from tardis.apps.mrtardis.mrtask import MRtask
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.client = Client()
        self.client.login(username=user, password=pwd)
        self.experiment = Experiment(approved=True,
                                     title="Test Experiment",
                                     institution_name="Test Institution",
                                     created_by=self.user,
                                     public=False)
        self.experiment.save()
        acl = ExperimentACL(pluginId="django_user",
                            entityId="1",
                            experiment=self.experiment,
                            canRead=True,
                            canWrite=True,
                            canDelete=True,
                            isOwner=True)
        acl.save()
        self.test_dataset = Dataset(experiment=self.experiment,
                                    description="test dataset")
        self.test_dataset.save()
        self.test_mrtask = MRtask(dataset=self.test_dataset)

    def test_view_index(self):
        response = self.client.get('/apps/mrtardis/index/%d/' %
                                   self.experiment.id,
                                   HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)

    def test_hpc_user_setup(self):
        import tardis.apps.mrtardis.utils as utils
        testresult = utils.test_hpc_connection(self.user)
        self.assertEqual(testresult, False)

    def test_views_test_user_setup(self):
        # with POST:
        posturl = '/apps/mrtardis/test_user_setup/%d/' % self.experiment.id
        response = self.client.post(posturl, {'hpc_username': '******'},
                                    HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 302)
        from tardis.apps.mrtardis.models import HPCUser
        self.assertEqual(HPCUser.objects.get(user=self.user).hpc_username,
                         'john')

    def test_views_MRform(self):
        posturl = '/apps/mrtardis/MRform/%d/' % self.experiment.id
        postdataarray = [{'postdata':
                              {'action': 'newDS',
                               'description': 'who is testing the testers?'},
                          'expectedoutcome': ""},
                         {'postdata':
                              {'action': 'continue',
                               'dataset': '1'},
                          'expectedoutcome': ""},
                         {'postdata':
                              {'action': 'rerunDS',
                               'dataset': '1',
                               'description': "test-rerun"},
                          'expectedoutcome': ""},
                         ]
        for actiontype in postdataarray:
            postdata = actiontype['postdata']
            response = self.client.post(posturl, postdata,
                                        HTTP_X_REQUESTED_WITH='XMLHttpRequest')
            self.assertEqual(response.status_code, 200)

#    def test_views_displayResults(self):
#        posturl = '/apps/mrtardis/displayResults/%d/' % self.experiment.id
#        response = self.client.post(posturl, {'dataset': 1},
#                                    HTTP_X_REQUESTED_WITH='XMLHttpRequest')
#        self.assertEqual(response.status_code, 200)

    def test_views_type_filtered_file_list(self):
        posturl = '/apps/mrtardis/type_filtered_file_list/%d/' %\
            self.test_dataset.id
        filetypes = [".mtz", ".pdb"]
        for filetype in filetypes:
            response = self.client.post(posturl, {'type': filetype},
                                        HTTP_X_REQUESTED_WITH='XMLHttpRequest')
            self.assertEqual(response.status_code, 200)

    def test_views_add_pdb_files(self):
        geturl = '/apps/mrtardis/add_pdb_files/%d/' %\
            self.test_dataset.id
        response = self.client.get(geturl,
                                   HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
class MyTardisHSMTasksTestCase(TestCase):
    """Tests for mytardis_hsm.tasks"""

    def setUp(self):
        """Setup test fixtures if needed."""
        self.user = User.objects.create_user("doctor", '',
                                             "pwd")

        self.exp = Experiment(title="Wonderful",
                              institution_name="Monash University",
                              created_by=self.user)
        self.exp.save()

        group = Group(name="Group1")
        group.save()

        facility = Facility(name="Test Facility",
                            manager_group=group)
        facility.save()

        inst = Instrument(name="Test Instrument1",
                          facility=facility)
        inst.save()

        self.dataset = Dataset(description="Dataset1",
                               instrument=inst)
        self.dataset.save()

        storage_classes = getattr(settings,
                                  "HSM_STORAGE_CLASSES",
                                  DEFAULT_HSM_CLASSES)
        self.sbox1 = StorageBox(name="SBOX1",
                                django_storage_class=storage_classes[0],
                                status='online', max_size=256)
        self.sbox1.save()
        sbox1_attr = StorageBoxAttribute(storage_box=self.sbox1,
                                         key='type',
                                         value=StorageBox.DISK)
        sbox1_attr.save()
        sbox1_loc_opt = StorageBoxOption(storage_box=self.sbox1,
                                         key="location",
                                         value=tempfile.gettempdir())
        sbox1_loc_opt.save()

        self.sbox2 = StorageBox(
            name="SBOX2",
            django_storage_class="any.non.disk.StorageSystem",
            status='offline', max_size=256)
        self.sbox2.save()
        sbox2_attr = StorageBoxAttribute(storage_box=self.sbox2,
                                         key='type',
                                         value=StorageBox.TAPE)
        sbox2_attr.save()

    @mock.patch("os.stat")
    def test_000_update_df_status_offline(self, mock_stat):
        """update_df_status should check the online status of
        preferred DFOs for all previously online datafiles and
        update online Parameter to 'False' for any offline files."""
        df1 = DataFile(dataset=self.dataset,
                       filename="test_df.jpg")
        df1.save()
        dfo1 = DataFileObject(datafile=df1,
                              storage_box=self.sbox1,
                              uri="stream/test.jpg",
                              verified=True)
        dfo1.save()

        schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
        ps = DatafileParameterSet(schema=schema, datafile=df1)
        ps.save()

        param_name = ParameterName.objects.get(schema=schema, name="online")
        param = DatafileParameter(parameterset=ps, name=param_name)
        param.string_value = True
        param.save()

        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=0,
                                       st_mtime=datetime.now())
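        # st_blocks == 0 mimics an HSM-migrated (offline) file: only a stub remains on disk.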
        update_df_status()

        params = DatafileParameter.objects.filter(
            parameterset__schema=schema,
            parameterset__datafile=df1)

        self.assertEqual(params.count(), 1)
        self.assertEqual(params[0].string_value, "False")

    @mock.patch("os.stat")
    def test_001_update_df_status_online(self, mock_stat):
        """update_df_status should check the online status of
        preferred DFOs for all previously online datafiles and
        leave the online Parameter as 'True' for any online files."""
        df1 = DataFile(dataset=self.dataset,
                       filename="test_df.jpg")
        df1.save()
        dfo1 = DataFileObject(datafile=df1,
                              storage_box=self.sbox1,
                              uri="stream/test.jpg",
                              verified=True)
        dfo1.save()
        # df1.verify()

        schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
        ps = DatafileParameterSet(schema=schema, datafile=df1)
        ps.save()

        param_name = ParameterName.objects.get(schema=schema, name="online")
        param = DatafileParameter(parameterset=ps, name=param_name)
        param.string_value = True
        param.save()

        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())
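        # st_blocks > 0 mimics a file whose blocks are still resident on disk (online).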
        update_df_status()

        params = DatafileParameter.objects.filter(
            parameterset__schema__namespace=HSM_DATAFILE_NAMESPACE,
            parameterset__datafile=df1)

        self.assertEqual(params.count(), 1)
        self.assertEqual(params[0].string_value, "True")

    @mock.patch('mytardis_hsm.mytardis_hsm.df_online')
    @mock.patch("os.stat")
    def test_002_update_df_status_skip_unverified(self, mock_stat, df_online):
        """update_df_status should skip files that are unverified"""
        df2 = DataFile(dataset=self.dataset,
                       filename="test_df2.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri="stream/test_df2.jpg")
        dfo2.save()
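        # dfo2 is deliberately left unverified, so update_df_status should ignore this datafile.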

        schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
        ps2 = DatafileParameterSet(schema=schema, datafile=df2)
        ps2.save()

        param_name = ParameterName.objects.get(schema=schema, name="online")
        param2 = DatafileParameter(parameterset=ps2, name=param_name)
        param2.string_value = True
        param2.save()

        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())
        update_df_status()
        df_online.assert_not_called()

    @mock.patch('mytardis_hsm.tasks.df_online', autospec=True)
    @mock.patch("os.stat")
    def test_003_update_df_status_skip_offline(self, mock_stat, mock_df_online):
        """update_df_status should skip any files that have previously
        marked as offline."""
        df2 = DataFile(dataset=self.dataset,
                       filename="test_df2.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri="stream/test_df2.jpg",
                              verified=True)
        dfo2.save()
        # dfo2 was saved with verified=True, so calling df2.verify() is unnecessary here

        schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
        ps2 = DatafileParameterSet(schema=schema, datafile=df2)
        ps2.save()

        param_name = ParameterName.objects.get(schema=schema, name="online")
        param2 = DatafileParameter(parameterset=ps2, name=param_name)
        param2.string_value = "False"
        param2.save()

        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())
        update_df_status()

        # assert that the df_online method wasn't called
        self.assertEqual(mock_df_online.call_count, 0)
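
A minimal sketch of the block-count heuristic the tests above appear to rely
on: on an HSM filesystem a file migrated to tape keeps its reported size but
shows zero allocated blocks, so an online check can be built on os.stat
alone. The helper name and threshold below are illustrative, not the
mytardis_hsm API.

import os


def file_is_online(path, min_stub_size=500):
    """Hedged guess at whether `path` is on disk rather than stubbed to tape.

    `min_stub_size` is an assumed threshold: very small files typically stay
    resident in the stub and are treated as online.
    """
    stats = os.stat(path)
    if stats.st_size <= min_stub_size:
        return True
    # Zero allocated blocks with a non-trivial size suggests the file has
    # been migrated to tape (offline).
    return stats.st_blocks > 0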
Exemplo n.º 55
0
class ViewTemplateContextsTest(TestCase):
    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 size=42,
                                 filename="foo",
                                 md5sum="junk")
        self.datafile.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.datafile.delete()
        self.acl.delete()

    def testExperimentView(self):
        """
        test some template context parameters for an experiment view
        """
        from tardis.tardis_portal.views import ExperimentView
        from django.http import HttpRequest
        import sys

        # Default behavior
        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.method = 'GET'
        request.user = self.user
        request.groups = []
        context = {
            'organization': ['test', 'test2'],
            'default_organization': 'test',
            'default_format': 'tar',
            'protocol': [['tgz', '/download/experiment/1/tgz/'],
                         ['tar', '/download/experiment/1/tar/']]
        }
        views_module.should_call('render_response_index'). \
            with_args(_AnyMatcher(), "tardis_portal/view_experiment.html",
                      _ContextMatcher(context))
        view_fn = ExperimentView.as_view()
        response = view_fn(request, experiment_id=self.exp.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.method = 'GET'
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {
                'organization': ['classic', 'test', 'test2'],
                'default_organization': 'classic',
                'default_format': 'tar',
                'protocol': [['tar', '/download/experiment/1/tar/']]
            }
            views_module.should_call('render_response_index'). \
                with_args(_AnyMatcher(), "tardis_portal/view_experiment.html",
                          _ContextMatcher(context))
            view_fn = ExperimentView.as_view()
            response = view_fn(request, experiment_id=self.exp.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")

    def testDatasetView(self):
        """
        test some context parameters for a dataset view
        """
        from tardis.tardis_portal.views import DatasetView
        from django.http import HttpRequest
        import sys

        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.method = 'GET'
        request.user = self.user
        request.groups = []
        context = {'default_organization': 'test', 'default_format': 'tar'}
        views_module.should_call('render_response_index'). \
            with_args(_AnyMatcher(), "tardis_portal/view_dataset.html",
                      _ContextMatcher(context))
        view_fn = DatasetView.as_view()
        response = view_fn(request, dataset_id=self.dataset.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.method = 'GET'
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {
                'default_organization': 'classic',
                'default_format': 'tar'
            }
            views_module.should_call('render_response_index'). \
                with_args(_AnyMatcher(), "tardis_portal/view_dataset.html",
                          _ContextMatcher(context))
            view_fn = DatasetView.as_view()
            response = view_fn(request, dataset_id=self.dataset.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")
Exemplo n.º 56
0
class ContextualViewTest(TestCase):
    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 size=42,
                                 filename="foo",
                                 md5sum="junk")
        self.datafile.save()

        self.testschema = Schema(namespace="http://test.com/test/schema",
                                 name="Test View",
                                 type=Schema.DATAFILE,
                                 hidden=True)
        self.testschema.save()
        self.dfps = DatafileParameterSet(datafile=self.datafile,
                                         schema=self.testschema)
        self.dfps.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.datafile.delete()
        self.testschema.delete()
        self.dfps.delete()
        self.acl.delete()

    def testDetailsDisplay(self):
        """
        test display of view for an existing schema and no display for an
        undefined one.
        """
        from tardis.tardis_portal.views import display_datafile_details
        request = flexmock(user=self.user, groups=[("testgroup", flexmock())])
        with self.settings(DATAFILE_VIEWS=[(
                "http://test.com/test/schema",
                "/test/url"), ("http://does.not.exist", "/false/url")]):
            response = display_datafile_details(request,
                                                datafile_id=self.datafile.id)
            self.assertEqual(response.status_code, 200)
            self.assertTrue("/ajax/parameters/" in response.content)
            self.assertTrue("/test/url" in response.content)
            self.assertFalse("/false/url" in response.content)
Exemplo n.º 57
0
class UploadTestCase(TestCase):
    def setUp(self):
        from os import path, mkdir
        from tempfile import mkdtemp

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        self.userProfile = UserProfile(user=self.user)

        self.test_dir = mkdtemp()

        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()

        acl = ExperimentACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            experiment=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ExperimentACL.OWNER_OWNED,
        )
        acl.save()

        self.dataset = Dataset(description='dataset description...',
                               experiment=self.exp)
        self.dataset.save()

        self.experiment_path = path.join(settings.FILE_STORE_PATH,
                                         str(self.dataset.experiment.id))

        self.dataset_path = path.join(self.experiment_path,
                                      str(self.dataset.id))

        if not path.exists(self.experiment_path):
            mkdir(self.experiment_path)
        if not path.exists(self.dataset_path):
            mkdir(self.dataset_path)

        # write test file

        self.filename = 'testfile.txt'

        self.f1 = open(path.join(self.test_dir, self.filename), 'w')
        self.f1.write('Test file 1')
        self.f1.close()

        self.f1_size = path.getsize(path.join(self.test_dir, self.filename))

        self.f1 = open(path.join(self.test_dir, self.filename), 'r')

    def tearDown(self):
        from shutil import rmtree

        self.f1.close()
        rmtree(self.test_dir)
        rmtree(self.dataset_path)
        rmtree(self.experiment_path)
        self.exp.delete()

    def testFileUpload(self):
        from os import path

        c = Client()
        c.login(username='******', password='******')
        session_id = c.session.session_key

        response = c.post('/upload/' + str(self.dataset.id) + '/', {
            'Filedata': self.f1,
            'session_id': session_id
        })

        test_files_db = \
            Dataset_File.objects.filter(dataset__id=self.dataset.id)

        self.assertTrue(
            path.exists(path.join(self.dataset_path, self.filename)))
        self.assertEqual(self.dataset.id, 1)
        self.assertEqual(test_files_db[0].url, 'tardis://testfile.txt')

    def testUploadComplete(self):
        from django.http import QueryDict, HttpRequest
        from tardis.tardis_portal.views import upload_complete
        data = [('filesUploaded', '1'), ('speed', 'really fast!'),
                ('allBytesLoaded', '2'), ('errorCount', '0')]
        post = QueryDict('&'.join(['%s=%s' % (k, v) for (k, v) in data]))
        request = HttpRequest()
        request.POST = post
        response = upload_complete(request)
        self.assertTrue('<p>Number: 1</p>' in response.content)
        self.assertTrue('<p>Errors: 0</p>' in response.content)
        self.assertTrue('<p>Bytes: 2</p>' in response.content)
        self.assertTrue('<p>Speed: really fast!</p>' in response.content)
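
Outside the Django test client, the same /upload/<dataset_id>/ endpoint can
be driven with an HTTP session. A hedged sketch follows: the host, login URL
and credentials are placeholders for a real deployment, and session-cookie
authentication is assumed.

import requests

BASE_URL = 'http://localhost:8000'  # placeholder host

with requests.Session() as session:
    # Obtain a session cookie first (login URL and credentials are placeholders).
    session.post(BASE_URL + '/login/',
                 data={'username': 'user', 'password': 'pass'})
    with open('testfile.txt', 'rb') as upload:
        response = session.post(
            BASE_URL + '/upload/1/',
            files={'Filedata': upload},
            data={'session_id': session.cookies.get('sessionid', '')})
    response.raise_for_status()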
Exemplo n.º 58
0
class StageFilesTestCase(TestCase):
    def setUp(self):
        # Create test owner without enough details
        username, email, password = ('testuser', '*****@*****.**',
                                     'password')
        user = User.objects.create_user(username, email, password)
        profile = UserProfile(user=user, isDjangoAccount=True)
        profile.save()
        # Need UserAuthentication
        UserAuthentication(userProfile=profile,
                           username=username,
                           authenticationMethod='localdb').save()
        # Create staging dir
        from os import path, makedirs
        staging_dir = path.join(settings.STAGING_PATH, username)
        if not path.exists(staging_dir):
            makedirs(staging_dir)
        # Ensure that staging dir is set up properly
        expect(get_full_staging_path(username)).to_be_truthy()

        # Create test experiment and make user the owner of it
        experiment = Experiment(title='Text Experiment',
                                institution_name='Test Uni',
                                created_by=user)
        experiment.save()
        acl = ExperimentACL(
            pluginId=django_user,
            entityId=str(user.id),
            experiment=experiment,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ExperimentACL.OWNER_OWNED,
        )
        acl.save()

        self.dataset = \
            Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(experiment)
        self.dataset.save()

        self.username, self.password = (username, password)

    def _get_authenticated_client(self):
        client = Client()
        # Login as user
        login = client.login(username=self.username, password=self.password)
        self.assertTrue(login)
        # Return authenticated client
        return client

    def _get_staging_url(self):
        return reverse('tardis.tardis_portal.views.stage_files_to_dataset',
                       args=[str(self.dataset.id)])

    def testForbiddenWithoutLogin(self):
        client = Client()
        response = client.get(self._get_staging_url())
        # Expect a redirect to login
        expect(response.status_code).to_equal(302)
        login_url = reverse('tardis.tardis_portal.views.login')
        ensure(login_url in response['Location'], True,
               "Redirect URL was not to login.")

    def testPostOnlyMethodAllowed(self):
        client = self._get_authenticated_client()

        for method in (x.lower() for x in ['GET', 'HEAD', 'PUT', 'OPTIONS']):
            response = getattr(client, method)(self._get_staging_url())
            # Expect a 405 Method Not Allowed
            expect(response.status_code).to_equal(405)
            # Expect valid "Allow" header
            expect(response['Allow']).to_equal('POST')

        response = client.post(self._get_staging_url())
        # Expect something other than a 405
        self.assertFalse(response.status_code == 405)

    def testRequiresJSON(self):
        client = Client()

        # Login as user
        login = client.login(username=self.username, password=self.password)
        self.assertTrue(login)

        response = client.post(self._get_staging_url())
        # Expect 400 Bad Request because we didn't have a payload
        expect(response.status_code).to_equal(400)

        response = client.post(self._get_staging_url(),
                               data={'files': ['foo', 'bar']})
        # Expect 400 Bad Request because we didn't have a JSON payload
        expect(response.status_code).to_equal(400)

        response = client.post(self._get_staging_url(),
                               data=json.dumps({'files': ['foo', 'bar']}),
                               content_type='application/octet-stream')
        # Expect 400 Bad Request because we didn't have a JSON Content-Type
        expect(response.status_code).to_equal(400)

    def testStageFile(self):
        client = self._get_authenticated_client()

        staging_dir = get_full_staging_path(self.username)

        from os.path import basename
        from tempfile import NamedTemporaryFile
        with NamedTemporaryFile('w', dir=staging_dir) as f:
            # Write some content
            f.write('This is just some content')
            f.flush()

            data = [f.name]
            content_type = 'application/json; charset=utf-8'
            response = client.post(self._get_staging_url(),
                                   data=json.dumps(data),
                                   content_type=content_type)

            # Expect 201 Created
            expect(response.status_code).to_equal(201)
            # Expect to get a list of URLs back
            urls = json.loads(response.content)
            expect(len(urls)).to_equal(1)

            # Should have single staging file
            dataset = Dataset.objects.get(id=self.dataset.id)
            expect(dataset.dataset_file_set.count()).to_equal(1)
            datafile = dataset.dataset_file_set.all()[0]
            expect(datafile.filename).to_equal(basename(f.name))
            expect(urlparse(datafile.url).scheme).to_equal('')
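
The staging endpoint above expects a JSON list of absolute paths inside the
caller's staging directory, posted with a JSON content type, and answers
201 Created with a JSON list of URLs. A hedged usage sketch with the Django
test client, reusing the same reverse() name as the test; the file path is a
placeholder and the old-style urlresolvers import matches this codebase's era.

import json

from django.core.urlresolvers import reverse
from django.test import Client

client = Client()
client.login(username='testuser', password='password')
url = reverse('tardis.tardis_portal.views.stage_files_to_dataset', args=['1'])
payload = json.dumps(['/path/to/staging/testuser/example.dat'])  # placeholder path
response = client.post(url, data=payload,
                       content_type='application/json; charset=utf-8')
assert response.status_code == 201
staged_urls = json.loads(response.content)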