Example #1
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset

    :param request: an HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset ID
    :type dataset_id: integer
    :returns: an HTTP response whose body is the JSON result
    :rtype: :class:`django.http.HttpResponse`
    """

    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')

    if request.method == 'POST':
        logger.debug('got POST')
        for key, val in request.POST.items():
            splits = val.split(",")
            for url in splits:
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    pass
                else:
                    picked_file = fp.get_file()
                    datafile = DataFile(dataset=dataset,
                                        filename=picked_file.name,
                                        size=picked_file.size)
                    datafile.save()
                    datafile.file_object = picked_file

    return HttpResponse(json.dumps({"result": True}))
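The view splits each POST value on commas, so a single form field can carry several Filepicker URLs at once. A minimal sketch of exercising it with Django's test client; the URL path and field name below are assumptions, not the project's real routing:

# Sketch only: '/ajax/fpupload/1/' and the 'files' field are hypothetical;
# adjust them to the project's actual urls.py and form.
import json
from django.test import Client

client = Client()
response = client.post('/ajax/fpupload/1/', {
    # one field, several comma-separated Filepicker URLs
    'files': 'https://www.filepicker.io/api/file/abc,'
             'https://www.filepicker.io/api/file/xyz',
})
assert json.loads(response.content) == {'result': True}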
Example #2
    def test_000_update_df_status_offline(self, mock_stat):
        """update_df_status should check the online status of
        preferred DFOs for all previously online datafiles and
        update online Parameter to 'False' for any offline files."""
        df1 = DataFile(dataset=self.dataset,
                       filename="test_df.jpg")
        df1.save()
        dfo1 = DataFileObject(datafile=df1,
                              storage_box=self.sbox1,
                              uri="stream/test.jpg",
                              verified=True)
        dfo1.save()

        schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
        ps = DatafileParameterSet(schema=schema, datafile=df1)
        ps.save()

        param_name = ParameterName.objects.get(schema=schema, name="online")
        param = DatafileParameter(parameterset=ps, name=param_name)
        param.string_value = True
        param.save()

        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=0,
                                       st_mtime=datetime.now())
        update_df_status()

        params = DatafileParameter.objects.filter(
            parameterset__schema=schema,
            parameterset__datafile=df1)

        self.assertEquals(params.count(), 1)
        self.assertEquals(params[0].string_value, "False")
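These HSM tests drive the online/offline decision through a mocked os.stat: a file whose st_blocks is 0 occupies no disk blocks (it has been migrated to tape), so it is reported offline even though st_size is non-zero, while st_blocks=100 in the other tests means the file is still on disk. A self-contained sketch of the heuristic these mocks imply:

from collections import namedtuple

Stats = namedtuple('Stats', 'st_size st_blocks st_mtime')

def online_sketch(stat_result):
    # Heuristic implied by these tests: a non-empty file with zero
    # allocated blocks lives only on tape, i.e. it is offline.
    # (Treating an empty file as online is an assumption here.)
    return stat_result.st_size == 0 or stat_result.st_blocks > 0

assert not online_sketch(Stats(st_size=10000, st_blocks=0, st_mtime=None))
assert online_sketch(Stats(st_size=10000, st_blocks=100, st_mtime=None))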
Example #4
    def test_003_update_df_status_skip_offline(self, mock_stat, mock_df_online):
        """update_df_status should skip any files that have previously
        marked as offline."""
        df2 = DataFile(dataset=self.dataset,
                       filename="test_df2.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri="stream/test_df2.jpg",
                              verified=True)
        dfo2.save()
        # df2.verify()

        schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
        ps2 = DatafileParameterSet(schema=schema, datafile=df2)
        ps2.save()

        param_name = ParameterName.objects.get(schema=schema, name="online")
        param2 = DatafileParameter(parameterset=ps2, name=param_name)
        param2.string_value = False
        param2.save()

        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())
        update_df_status()

        # assert that the df_online method wasn't called
        self.assertEquals(mock_df_online.call_count, 0)
Example #5
def upload(request, dataset_id):
    """
    Uploads a datafile to the store and saves its metadata

    :param request: an HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset ID
    :type dataset_id: integer
    :returns: an HTTP response containing 'True' on success
    :rtype: :class:`django.http.HttpResponse`
    """

    dataset = Dataset.objects.get(id=dataset_id)

    logger.debug('called upload')
    if request.method == 'POST':
        logger.debug('got POST')
        if request.FILES:

            uploaded_file_post = request.FILES['Filedata']
            logger.debug('done upload')
            datafile = DataFile(dataset=dataset,
                                filename=uploaded_file_post.name,
                                size=uploaded_file_post.size)
            datafile.save(require_checksums=False)
            logger.debug('created file')
            datafile.file_object = uploaded_file_post
            logger.debug('saved datafile')

    return HttpResponse('True')
Example #6
def register_squashfile(exp_id, epn, sq_dir, sq_filename, namespace):
    '''
    example:
    register_squashfile(456, '1234A', '/srv/squashstore', '1234A.squashfs',
        'http://synchrotron.org.au/mx/squashfsarchive/1')
    '''
    dfs = DataFile.objects.filter(filename=sq_filename,
                                  dataset__experiments__id=exp_id)
    if len(dfs) == 1:
        return dfs[0]
    e = Experiment.objects.get(id=exp_id)
    ds = Dataset(description="01 SquashFS Archive")
    ds.save()
    ds.experiments.add(e)
    filepath = os.path.join(sq_dir, sq_filename)
    try:
        md5sum = open(filepath + '.md5sum', 'r').read().strip()[:32]
    except IOError:
        print('no md5sum file found')
        return None
    size = os.path.getsize(filepath)
    df = DataFile(md5sum=md5sum,
                  filename=sq_filename,
                  size=str(size),
                  dataset=ds)
    df.save()
    schema = Schema.objects.filter(namespace=namespace)[0]
    ps = DatafileParameterSet(schema=schema, datafile=df)
    ps.save()
    ps.set_param('EPN', epn)
    sbox = StorageBox.objects.get(name='squashstore')
    dfo = DataFileObject(storage_box=sbox, datafile=df, uri=sq_filename)
    dfo.save()
    return df
Example #7
    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 size=42,
                                 filename="foo",
                                 md5sum="junk")
        self.datafile.save()
Example #9
 def create_dfo(self, top, filename, dataset=None):
     '''
     create dfo and datafile if necessary
     '''
     df, df_data = self.find_datafile(top, filename)
     if df is None and df_data is None:
         return True  # is a link
     if df:
         if dataset is not None and df.dataset.id != dataset.id:
             # olddataset_id = df.dataset.id
             df.dataset = dataset
             df.save()
             # oldds = Dataset.objects.get(id=olddataset_id)
             # if oldds.datafile_set.count() == 0:
             #     oldds.delete()
         elif dataset is None and top.startswith('frames'):
             prefix = 'Raw data for'
             prefix_dataset(df.dataset, prefix)
         self.update_dataset(df.dataset, top)
     else:
         if dataset is None:
             dataset = self.get_or_create_dataset('lost and found')
         df = DataFile(dataset=dataset,
                       filename=filename,
                       directory=top,
                       **df_data)
         df.save()
     dfo = DataFileObject(datafile=df,
                          storage_box=self.s_box,
                          uri=os.path.join(top, filename))
     dfo.save()
     return True
Example #11
    def setUp(self):
        """Setup test fixtures if needed."""
        self.user = User.objects.create_user("doctor", '', "pwd")

        self.exp = Experiment(title="Wonderful",
                              institution_name="Monash University",
                              created_by=self.user)
        self.exp.save()

        group = Group(name="Group1")
        group.save()

        facility = Facility(name="Test Facility", manager_group=group)
        facility.save()

        self.inst = Instrument(name="Test Instrument1", facility=facility)
        self.inst.save()

        self.dataset = Dataset(description="Dataset1", instrument=self.inst)
        self.dataset.save()

        storage_classes = getattr(settings, "HSM_STORAGE_CLASSES",
                                  DEFAULT_HSM_CLASSES)
        self.sbox1 = StorageBox(name="SBOX1",
                                django_storage_class=storage_classes[0],
                                status='online',
                                max_size=256)
        self.sbox1.save()
        sbox1_attr = StorageBoxAttribute(storage_box=self.sbox1,
                                         key='type',
                                         value=StorageBox.DISK)
        sbox1_attr.save()
        sbox1_loc_opt = StorageBoxOption(storage_box=self.sbox1,
                                         key="location",
                                         value="/dummy/path")
        sbox1_loc_opt.save()

        self.sbox2 = StorageBox(
            name="SBOX2",
            django_storage_class="any.non.disk.StorageSystem",
            status='offline',
            max_size=256)
        self.sbox2.save()
        sbox2_attr = StorageBoxAttribute(storage_box=self.sbox2,
                                         key='type',
                                         value=StorageBox.TAPE)
        sbox2_attr.save()

        self.df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        self.df1.save()
        self.dfo1 = DataFileObject(datafile=self.df1,
                                   storage_box=self.sbox1,
                                   uri="stream/test.jpg",
                                   verified=True)
        self.dfo1.save()
        self.df1.verify()
Example #12
def create_staging_datafile(filepath, username, dataset_id):
    from tardis.tardis_portal.models import DataFile, Dataset
    dataset = Dataset.objects.get(id=dataset_id)

    url, size = get_staging_url_and_size(username, filepath)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(filepath),
                        size=size)
    datafile.save()
    datafile.file_object = open(filepath, 'r')
Example #13
    def setUp(self):
        raise SkipTest  # temporarily disabling this feature, needs coding
        from tempfile import mkdtemp, mktemp
        from django.conf import settings
        import os

        # Disconnect post_save signal
        from django.db.models.signals import post_save
        from tardis.tardis_portal.models import Experiment, \
            staging_hook, Dataset, DataFile, DataFileObject, StorageBox
        post_save.disconnect(staging_hook, sender=DataFileObject)

        from django.contrib.auth.models import User
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        try:
            os.makedirs(settings.GET_FULL_STAGING_PATH_TEST)
        except OSError:
            pass
        self.temp = mkdtemp(dir=settings.GET_FULL_STAGING_PATH_TEST)

        self.filepath = mktemp(dir=self.temp)
        content = 'test file'
        with open(self.filepath, "w+b") as f:
            f.write(content)

        # make datafile
        exp = Experiment(title='test exp1',
                         institution_name='monash',
                         created_by=self.user)
        exp.save()

        # make dataset
        dataset = Dataset(description="dataset description...")
        dataset.save()
        dataset.experiments.add(exp)
        dataset.save()

        # create datafile
        df = DataFile(dataset=dataset, size=len(content),
                      filename=path.basename(self.filepath),
                      md5sum='f20d9f2072bbeb6691c0f9c5099b01f3')
        df.save()

        # create replica
        base_url = settings.GET_FULL_STAGING_PATH_TEST
        s_box = StorageBox.get_default_storage(location=base_url)
        dfo = DataFileObject(datafile=df,
                             uri=self.filepath,
                             storage_box=s_box)
        dfo.save()
        self.dfo = dfo
Example #14
def create_staging_datafile(filepath, username, dataset_id):
    init_filters()
    from tardis.tardis_portal.models import DataFile, Dataset
    dataset = Dataset.objects.get(id=dataset_id)

    url, size = get_staging_url_and_size(username, filepath)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(filepath),
                        size=size)
    datafile.save()
    datafile.file_object = open(filepath, 'r')
Example #15
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(0, nosDatafiles):
        df_ = DataFile(dataset=ds_,
                       filename='file_%d' % i,
                       size='21',
                       sha512sum='bogus')
        df_.save()
    ds_.save()
    return ds_
Example #16
 def _build(dataset, filename, url):
     from tardis.tardis_portal.models import \
         DataFileObject
     datafile = DataFile(dataset=dataset, filename=filename)
     datafile.save()
     dfo = DataFileObject(
         datafile=datafile,
         storage_box=datafile.get_default_storage_box(),
         uri=url)
     dfo.save()
     return datafile
Example #17
 def _build(dataset, filename, url):
     datafile_content = b"\n".join([b'some data %d' % i for i in range(1000)])
     filesize = len(datafile_content)
     datafile = DataFile(
         dataset=dataset, filename=filename, size=filesize)
     datafile.save()
     dfo = DataFileObject(
         datafile=datafile,
         storage_box=datafile.get_default_storage_box(),
         uri=url)
     dfo.file_object = BytesIO(datafile_content)
     dfo.save()
     return datafile
Example #18
    def test_create_index(self):
        self.exp1 = Experiment(title='test exp1',
                               institution_name='monash',
                               description='Test Description',
                               created_by=self.user)
        self.exp2 = Experiment(title='test exp2',
                               institution_name='monash',
                               description='Test Description',
                               created_by=self.user)
        self.exp1.save()
        self.exp2.save()
        # get search instance
        search = ExperimentDocument.search()
        # query for title(exact matching)
        query = search.query("match", title='test exp1')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].title, 'test exp1')
        # query for description
        query = search.query("match", description='Test Description')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].description, 'Test Description')
        # query for created_time
        query = search.query("match", created_time=self.exp1.created_time)
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].created_time, self.exp1.created_time)
        # dataset
        # dataset1 belongs to experiment1
        self.dataset1 = Dataset(description='test_dataset')
        self.dataset1.save()
        self.dataset1.experiments.add(self.exp1)
        self.dataset1.save()

        # dataset2 belongs to experiment2
        self.dataset2 = Dataset(description='test_dataset2')
        self.dataset2.save()
        self.dataset2.experiments.add(self.exp2)
        self.dataset2.save()
        # search on dataset
        search = DatasetDocument.search()
        query = search.query("match", description='test_dataset')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits.total.value, 1)
        # search on datafile
        settings.REQUIRE_DATAFILE_SIZES = False
        settings.REQUIRE_DATAFILE_CHECKSUMS = False
        self.datafile = DataFile(dataset=self.dataset1, filename='test.txt')
        self.datafile.save()
        search = DataFileDocument.search()
        query = search.query("match", filename='test.txt')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].filename, self.datafile.filename)
Example #20
    def test_007_dfo_unverified(self):
        """df_online and dfo_online should raise Exception for an unverfied DataFile or
        DataFileObject, respectively"""
        df2 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        df2.save()
        self.assertRaises(DataFileNotVerified, df_online, df2)

        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri="stream/test.jpg",
                              verified=False)
        dfo2.save()

        self.assertRaises(DataFileObjectNotVerified, dfo_online, dfo2)
Example #21
 def _build(dataset, filename, url):
     datafile_content = u"\n".join([u'some data %d' % i
                                   for i in range(1000)])
     filesize = len(datafile_content)
     datafile = DataFile(
         dataset=dataset, filename=filename, size=filesize)
     datafile.save()
     dfo = DataFileObject(
         datafile=datafile,
         storage_box=datafile.get_default_storage_box(),
         uri=url)
     dfo.file_object = StringIO(datafile_content)
     dfo.save()
     return datafile
Example #22
    def test_wrong_size_verification(self):
        content = urandom(1024)
        cf = ContentFile(content, 'background_task_testfile')

        # Create new Datafile
        datafile = DataFile(dataset=self.dataset)
        datafile.filename = cf.name
        datafile.size = len(content) - 1
        datafile.sha512sum = hashlib.sha512(content).hexdigest()
        datafile.save()
        datafile.file_object = cf
        # verify explicitly to catch Exceptions hidden by celery
        datafile.verify()
        self.assertFalse(datafile.file_objects.get().verified)
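The test relies on verification comparing the recorded size and checksum against the bytes actually stored: with the size deliberately off by one, the DFO must stay unverified. The core check, sketched in the spirit of datafile.verify() but without the ORM:

import hashlib
from os import urandom

content = urandom(1024)
recorded_size = len(content) - 1  # deliberately wrong, as in the test
recorded_sha512 = hashlib.sha512(content).hexdigest()

# Both the size and the checksum have to match the stored bytes
# for the file object to verify.
verified = (recorded_size == len(content) and
            recorded_sha512 == hashlib.sha512(content).hexdigest())
assert verified is False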
Example #23
 def _build_datafile(self, testfile, filename, dataset, checksum=None,
                     size=None, mimetype=''):
     filesize, sha512sum = get_size_and_sha512sum(testfile)
     datafile = DataFile(dataset=dataset, filename=filename,
                         mimetype=mimetype,
                         size=size if size is not None else filesize,
                         sha512sum=(checksum if checksum else sha512sum))
     datafile.save()
     dfo = DataFileObject(
         datafile=datafile,
         storage_box=datafile.get_default_storage_box())
     dfo.save()
     with open(testfile, 'r') as sourcefile:
         dfo.file_object = sourcefile
     return DataFile.objects.get(pk=datafile.pk)
Example #24
    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """
        user = "******"
        pwd = "secret"
        email = ""
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title="test exp1", institution_name="monash", created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description="dataset description...")
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset, size=42, filename="foo", md5sum="junk")
        self.datafile.save()

        self.testschema = Schema(
            namespace="http://test.com/test/schema", name="Test View", type=Schema.DATAFILE, hidden=True
        )
        self.testschema.save()
        self.dfps = DatafileParameterSet(datafile=self.datafile, schema=self.testschema)
        self.dfps.save()
Example #27
        def create_datafile(index):
            testfile = path.join(base_path, 'jeol_sem_test%d.txt' % index)
            size, sha512sum = get_size_and_sha512sum(testfile)

            datafile = DataFile(dataset=dataset,
                                filename=path.basename(testfile),
                                size=size,
                                sha512sum=sha512sum)
            datafile.save()
            dfo = DataFileObject(
                datafile=datafile,
                storage_box=s_box,
                uri=path.basename(testfile))
            dfo.save()

            return DataFile.objects.get(pk=datafile.pk)
Example #28
def dataset_aggregate_info(dataset):
    datafiles_all = DataFile.objects.filter(dataset=dataset)
    verified_datafiles_count = \
        DataFile.objects.filter(dataset=dataset) \
        .values('id') \
        .annotate(min_verified=Min(Case(When(file_objects__verified=True,
                                             then=1),
                                        default=0,
                                        output_field=IntegerField()))) \
        .filter(min_verified=1) \
        .order_by('id').count()
    verified_datafiles_size = \
        DataFile.objects.filter(dataset=dataset) \
            .values('id') \
            .annotate(min_verified=Min(Case(When(file_objects__verified=True,
                                                 then=1),
                                            default=0,
                                            output_field=IntegerField()))) \
            .filter(min_verified=1) \
            .order_by('id') \
            .aggregate(Sum('size'))['size__sum'] or 0
    return {
        "dataset_size": DataFile.sum_sizes(datafiles_all),
        "verified_datafiles_count": verified_datafiles_count,
        "verified_datafiles_size": verified_datafiles_size,
        "datafile_count": datafiles_all.count()
    }
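The Min(Case(When(...))) annotation above counts a datafile as verified only when every one of its file_objects rows is verified: each DFO contributes a 0/1 flag, and the minimum over the group is 1 only if all flags are 1. The same rule in plain Python, with made-up in-memory data standing in for the joined rows:

# Plain-Python illustration of the min-over-flags rule; the dict below is
# made-up data standing in for each datafile's DataFileObject rows.
dfos = {1: [True, True], 2: [True, False], 3: [False]}

verified_ids = [df_id for df_id, flags in dfos.items()
                if min(1 if f else 0 for f in flags) == 1]
assert verified_ids == [1]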
Example #29
def experiment_description(request, experiment_id):
    """View an existing experiment's description. To be loaded via ajax.

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param experiment_id: the ID of the experiment to be edited
    :type experiment_id: string
    :rtype: :class:`django.http.HttpResponse`

    """
    c = {}

    try:
        experiment = Experiment.safe.get(request.user, experiment_id)
    except PermissionDenied:
        return return_response_error(request)
    except Experiment.DoesNotExist:
        return return_response_not_found(request)

    c['experiment'] = experiment
    c['subtitle'] = experiment.title
    c['nav'] = [{
        'name': 'Data',
        'link': '/experiment/view/'
    }, {
        'name': experiment.title,
        'link': experiment.get_absolute_url()
    }]

    c['authors'] = experiment.experimentauthor_set.all()

    c['datafiles'] = \
        DataFile.objects.filter(dataset__experiments=experiment_id)

    c['owners'] = experiment.get_owners()

    # calculate the sum of the datafile sizes
    c['size'] = DataFile.sum_sizes(c['datafiles'])

    c['has_download_permissions'] = \
        authz.has_experiment_download_access(request, experiment_id)

    c['has_write_permissions'] = \
        authz.has_write_permissions(request, experiment_id)

    if request.user.is_authenticated():
        c['is_owner'] = authz.has_experiment_ownership(request, experiment_id)

    _add_protocols_and_organizations(request, experiment, c)

    if 'status' in request.GET:
        c['status'] = request.GET['status']
    if 'error' in request.GET:
        c['error'] = request.GET['error']

    return HttpResponse(
        render_response_index(
            request, 'tardis_portal/ajax/experiment_description.html', c))
Example #31
 def setUp(self):
     super(SimpleSearchTest, self).setUp()
     self.out = StringIO()
     call_command('search_index',
                  stdout=self.out,
                  action='delete',
                  force=True)
     call_command('search_index',
                  stdout=self.out,
                  action='rebuild',
                  force=True)
     # add dataset and datafile to experiment
     self.dataset1 = Dataset(description='test_dataset')
     self.dataset1.save()
     self.dataset1.experiments.add(self.testexp)
     self.dataset1.save()
     settings.REQUIRE_DATAFILE_SIZES = False
     settings.REQUIRE_DATAFILE_CHECKSUMS = False
     self.datafile = DataFile(dataset=self.dataset1, filename='test.txt')
     self.datafile.save()
Example #32
 def _build_datafile(self,
                     testfile,
                     filename,
                     dataset,
                     checksum=None,
                     size=None,
                     mimetype=''):
     filesize, sha512sum = get_size_and_sha512sum(testfile)
     datafile = DataFile(dataset=dataset,
                         filename=filename,
                         mimetype=mimetype,
                         size=size if size is not None else filesize,
                         sha512sum=(checksum if checksum else sha512sum))
     datafile.save()
     dfo = DataFileObject(datafile=datafile,
                          storage_box=datafile.get_default_storage_box())
     dfo.save()
     with open(testfile, 'r') as sourcefile:
         dfo.file_object = sourcefile
     return DataFile.objects.get(pk=datafile.pk)
Example #33
        def create_datafile(index):
            testfile = path.join(base_path, 'middleware_test%d.txt' % index)

            size, sha512sum = get_size_and_sha512sum(testfile)

            datafile = DataFile(dataset=dataset,
                                filename=path.basename(testfile),
                                size=size,
                                sha512sum=sha512sum)
            datafile.save()
            dfo = DataFileObject(
                datafile=datafile,
                storage_box=s_box,
                uri=path.basename(testfile))
            dfo.save()

            if index != 1:
                dfo.verified = False
                dfo.save(update_fields=['verified'])
            return DataFile.objects.get(pk=datafile.pk)
Example #35
    def test_002_no_duplicate_params(self, mock_stat):
        """Datafile should only ever have one online param"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())

        df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        df1.save()
        dfo1 = DataFileObject(datafile=df1,
                              storage_box=self.sbox1,
                              uri="stream/test.jpg",
                              verified=True)
        dfo1.save()
        df1.verify()

        param_name = ParameterName.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, name="online")

        paramset = DatafileParameterSet.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, datafile=df1)

        params = DatafileParameter.objects.filter(parameterset=paramset,
                                                  name=param_name)

        self.assertEquals(params.count(), 1)

        self.assertRaises(OnlineParamExistsError,
                          create_df_status, df1, HSM_DATAFILE_NAMESPACE, 500)

        params = DatafileParameter.objects.filter(parameterset=paramset,
                                                  name=param_name)

        self.assertEquals(params.count(), 1)
Example #36
    def test_001_create_df_status(self, mock_stat):
        """When a new datafile record is verified, metadata for it's
        online/offline status should be created and populated with the
        current online status"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())

        temp = tempfile.NamedTemporaryFile(dir=tempfile.gettempdir())
        temp_name = os.path.basename(temp.name)
        df2 = DataFile(dataset=self.dataset, filename=temp_name)
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri=temp_name)
        dfo2.save()
        df2.verify()

        param_name = ParameterName.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, name="online")

        paramset = DatafileParameterSet.objects.get(
            schema__namespace=HSM_DATAFILE_NAMESPACE, datafile=df2)

        param = DatafileParameter.objects.get(parameterset=paramset,
                                              name=param_name)

        self.assertEquals(param.string_value, "True")
        temp.close()
Example #37
 def test_deleting_dfo_without_uri(self):
     dataset = Dataset(description="dataset description")
     dataset.save()
     save1 = settings.REQUIRE_DATAFILE_SIZES
     save2 = settings.REQUIRE_DATAFILE_CHECKSUMS
     try:
         settings.REQUIRE_DATAFILE_SIZES = False
         settings.REQUIRE_DATAFILE_CHECKSUMS = False
         datafile = DataFile(dataset=dataset, filename='test1.txt')
         datafile.save()
     finally:
         settings.REQUIRE_DATAFILE_SIZES = save1
         settings.REQUIRE_DATAFILE_CHECKSUMS = save2
     dfo = DataFileObject(
             datafile=datafile,
             storage_box=datafile.get_default_storage_box(),
             uri=None)
     dfo.save()
     self.assertIsNone(dfo.uri)
     self.assertIsNotNone(dfo.id)
     dfo.delete()
     self.assertIsNone(dfo.id)
Example #38
    def test_002_update_df_status_skip_unverified(self, mock_stat, df_online):
        """update_df_status should skip files that are unverified"""
        df2 = DataFile(dataset=self.dataset,
                       filename="test_df2.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri="stream/test_df2.jpg")
        dfo2.save()

        schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
        ps2 = DatafileParameterSet(schema=schema, datafile=df2)
        ps2.save()

        param_name = ParameterName.objects.get(schema=schema, name="online")
        param2 = DatafileParameter(parameterset=ps2, name=param_name)
        param2.string_value = True
        param2.save()

        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=100,
                                       st_mtime=datetime.now())
        update_df_status()
        df_online.assert_not_called()
Example #39
def stats(request):
    # using count() is more efficient than using len() on a query set
    cursor = connection.cursor()
    if cursor.db.vendor == 'postgresql':
        cursor.execute("SELECT SUM(size::bigint) FROM tardis_portal_datafile")
        try:
            datafile_size = int(cursor.fetchone()[0])
        except TypeError:
            datafile_size = 0
    else:
        datafile_size = DataFile.sum_sizes(DataFile.objects.all())
    c = {
        'experiment_count': Experiment.objects.all().count(),
        'dataset_count': Dataset.objects.all().count(),
        'datafile_count': DataFile.objects.all().count(),
        'datafile_size': datafile_size,
    }
    return HttpResponse(
        render_response_index(request, 'tardis_portal/stats.html', c))
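The size::bigint cast in the raw SQL suggests the size column is stored as text, which is why the non-PostgreSQL branch falls back to DataFile.sum_sizes and adds the sizes up in Python. A sketch of what such a fallback plausibly does (an assumption; the real sum_sizes may differ in detail):

# Hypothetical stand-in for a sum_sizes-style fallback: coerce each
# (possibly string-valued) size and total them in Python.
def sum_sizes_sketch(datafiles):
    return sum(int(df.size or 0) for df in datafiles)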
Example #41
 def _build(dataset, filename, url=None):
     datafile = DataFile(dataset=dataset, filename=filename)
     datafile.save()
     if url is None:
         datafile.file_object = StringIO('bla')
         return datafile
     from tardis.tardis_portal.models import \
         DataFileObject
     dfo = DataFileObject(
         datafile=datafile,
         storage_box=datafile.get_default_storage_box(),
         uri=url)
     dfo.save()
     return datafile
Example #42
    def testLocalFile(self):
        content = urandom(1024)
        cf = ContentFile(content, 'background_task_testfile')

        # Create new Datafile
        datafile = DataFile(dataset=self.dataset)
        datafile.filename = cf.name
        datafile.size = len(content)
        datafile.sha512sum = hashlib.sha512(content).hexdigest()
        datafile.save()
        datafile.file_object = cf

        dfo = datafile.file_objects.all()[0]
        # undo auto-verify:
        dfo.verified = False
        dfo.save(update_fields=['verified'])

        # Check that it's not currently verified
        expect(datafile.verified).to_be(False)
        # Check it verifies
        verify_dfos()
        expect(datafile.verified).to_be(True)
Example #43
def _create_datafile():
    user = User.objects.create_user('testuser', '*****@*****.**', 'pwd')
    user.save()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test",
                                           created_by=user,
                                           public_access=full_access)
    experiment.save()
    ObjectACL(content_object=experiment,
              pluginId='django_user',
              entityId=str(user.id),
              isOwner=True,
              canRead=True,
              canWrite=True,
              canDelete=True,
              aclOwnershipType=ObjectACL.OWNER_OWNED).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile('iiif_stored_file', None, None, None)
    with Image(filename='magick:rose') as img:
        img.format = 'tiff'
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = DataFile(dataset=dataset,
                        size=os.path.getsize(tempfile.file.name),
                        filename='iiif_named_file',
                        mimetype='image/tiff')
    compute_md5 = getattr(settings, 'COMPUTE_MD5', True)
    compute_sha512 = getattr(settings, 'COMPUTE_SHA512', True)
    checksums = compute_checksums(open(tempfile.file.name, 'r'),
                                  compute_md5=compute_md5,
                                  compute_sha512=compute_sha512)
    if compute_md5:
        datafile.md5sum = checksums['md5sum']
    if compute_sha512:
        datafile.sha512sum = checksums['sha512sum']
    datafile.save()
    datafile.file_object = tempfile
    return datafile
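compute_checksums reads the file once and returns only the digests requested via the flags. A self-contained hashlib sketch of that idea, assuming the {'md5sum': ..., 'sha512sum': ...} return shape used above:

import hashlib

def compute_checksums_sketch(fileobj, compute_md5=True, compute_sha512=True):
    # Stream the file once, feeding every requested hash as we go.
    hashers = {}
    if compute_md5:
        hashers['md5sum'] = hashlib.md5()
    if compute_sha512:
        hashers['sha512sum'] = hashlib.sha512()
    for chunk in iter(lambda: fileobj.read(8192), b''):
        for h in hashers.values():
            h.update(chunk)
    return {name: h.hexdigest() for name, h in hashers.items()}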
Example #44
    def test_003_offline_dataset(self, mock_stat):
        """A dataset should be offline if any datafiles are offline"""
        mock_stat.return_value = Stats(st_size=10000,
                                       st_blocks=0,
                                       st_mtime=datetime.now())
        ds = Dataset(description="Dataset2", instrument=self.inst)
        ds.save()

        df2 = DataFile(dataset=ds, filename="test_file.jpg")
        df2.save()
        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri=df2.filename)
        dfo2.save()
        df2.verify()

        self.assertFalse(dataset_online(ds))
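The assertion encodes the rule that a dataset is online only while every datafile in it is online; the single file mocked with st_blocks=0 flips the whole dataset offline. Reduced to plain Python:

def dataset_online_sketch(file_online_flags):
    # A dataset counts as online only when every file in it is online.
    return all(file_online_flags)

assert dataset_online_sketch([True, True])
assert not dataset_online_sketch([True, False])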
Example #45
 def _build(dataset, filename, url=None):
     datafile = DataFile(dataset=dataset, filename=filename)
     datafile.save()
     if url is None:
         datafile.file_object = StringIO(u'bla')
         return datafile
     dfo = DataFileObject(
         datafile=datafile,
         storage_box=datafile.get_default_storage_box(),
         uri=url)
     dfo.save()
     # Tests are run with CELERY_ALWAYS_EAGER = True,
     # so saving a DFO will trigger an immediate attempt
     # to verify the DFO which will trigger an attempt
     # to apply filters because we are overriding the
     # USE_FILTERS setting to True in this test:
     self.assertNotEqual(mock_send_task.call_count, 0)
     return datafile
Example #47
def _create_datafile():
    user = User.objects.create_user("testuser", "*****@*****.**", "pwd")
    user.save()

    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access)
    experiment.save()
    ObjectACL(
        content_object=experiment,
        pluginId="django_user",
        entityId=str(user.id),
        isOwner=True,
        canRead=True,
        canWrite=True,
        canDelete=True,
        aclOwnershipType=ObjectACL.OWNER_OWNED,
    ).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None)
    with Image(filename="magick:rose") as img:
        img.format = "tiff"
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = DataFile(
        dataset=dataset, size=os.path.getsize(tempfile.file.name), filename="iiif_named_file", mimetype="image/tiff"
    )
    checksums = compute_checksums(open(tempfile.file.name, "r"))
    datafile.md5sum = checksums["md5sum"]
    datafile.sha512sum = checksums["sha512sum"]
    datafile.save()
    datafile.file_object = tempfile
    return datafile
Example #48
class ViewTemplateContextsTest(TestCase):

    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title='test exp1',
                              institution_name='monash', created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 size=42, filename="foo",
                                 md5sum="junk")
        self.datafile.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.datafile.delete()
        self.acl.delete()

    def testExperimentView(self):
        """
        test some template context parameters for an experiment view
        """
        from tardis.tardis_portal.views import ExperimentView
        from django.http import HttpRequest
        import sys

        # Default behavior
        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.method = 'GET'
        request.user = self.user
        request.groups = []
        context = {'organization': ['test', 'test2'],
                   'default_organization': 'test',
                   'default_format': 'tar',
                   'protocol': [['tgz', '/download/experiment/1/tgz/'],
                                ['tar', '/download/experiment/1/tar/']]}
        views_module.should_call('render_response_index'). \
            with_args(_AnyMatcher(), "tardis_portal/view_experiment.html",
                      _ContextMatcher(context))
        view_fn = ExperimentView.as_view()
        response = view_fn(request, experiment_id=self.exp.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.method = 'GET'
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {'organization': ['classic', 'test', 'test2'],
                       'default_organization': 'classic',
                       'default_format': 'tar',
                       'protocol': [['tar', '/download/experiment/1/tar/']]}
            views_module.should_call('render_response_index'). \
                with_args(_AnyMatcher(), "tardis_portal/view_experiment.html",
                          _ContextMatcher(context))
            view_fn = ExperimentView.as_view()
            response = view_fn(request, experiment_id=self.exp.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")

    def testDatasetView(self):
        """
        test some context parameters for a dataset view
        """
        from tardis.tardis_portal.views import DatasetView
        from django.http import HttpRequest
        import sys

        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.method = 'GET'
        request.user = self.user
        request.groups = []
        context = {'default_organization': 'test',
                   'default_format': 'tar'}
        views_module.should_call('render_response_index'). \
            with_args(_AnyMatcher(), "tardis_portal/view_dataset.html",
                      _ContextMatcher(context))
        view_fn = DatasetView.as_view()
        response = view_fn(request, dataset_id=self.dataset.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.method = 'GET'
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {'default_organization': 'classic',
                       'default_format': 'tar'}
            views_module.should_call('render_response_index'). \
                with_args(_AnyMatcher(), "tardis_portal/view_dataset.html",
                          _ContextMatcher(context))
            view_fn = DatasetView.as_view()
            response = view_fn(request, dataset_id=self.dataset.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")
Example #49
    def setUp(self):
        from django.contrib.auth.models import User
        from tempfile import mkdtemp

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        self.test_dir = mkdtemp()

        self.exp = Experiment(title='test exp1',
class ParameterSetManagerTestCase(TestCase):

    def setUp(self):
        from django.contrib.auth.models import User
        from tempfile import mkdtemp

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        self.test_dir = mkdtemp()

        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()

        self.dataset = Dataset(description="dataset description...")
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 filename="testfile.txt",
                                 size="42", md5sum='bogus')
        self.datafile.save()

        self.dfo = DataFileObject(
            datafile=self.datafile,
            storage_box=self.datafile.get_default_storage_box(),
            uri="1/testfile.txt")
        self.dfo.save()

        self.schema = Schema(
            namespace="http://localhost/psmtest/df/",
            name="Parameter Set Manager", type=3)
        self.schema.save()

        self.parametername1 = ParameterName(
            schema=self.schema, name="parameter1",
            full_name="Parameter 1")
        self.parametername1.save()

        self.parametername2 = ParameterName(
            schema=self.schema, name="parameter2",
            full_name="Parameter 2",
            data_type=ParameterName.NUMERIC)
        self.parametername2.save()

        self.parametername3 = ParameterName(
            schema=self.schema, name="parameter3",
            full_name="Parameter 3",
            data_type=ParameterName.DATETIME)
        self.parametername3.save()

        self.datafileparameterset = DatafileParameterSet(
            schema=self.schema, datafile=self.datafile)
        self.datafileparameterset.save()

        self.datafileparameter1 = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername1, string_value="test1")
        self.datafileparameter1.save()

        self.datafileparameter2 = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername2, numerical_value=2)
        self.datafileparameter2.save()

        # Create a ParameterName and Parameter of type LINK to an experiment
        self.parametername_exp_link = ParameterName(
            schema=self.schema, name="exp_link",
            full_name="This parameter is a experiment LINK",
            data_type=ParameterName.LINK)
        self.parametername_exp_link.save()

        self.exp_link_param = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername_exp_link)
        exp_url = self.exp.get_absolute_url()  # /experiment/view/1/
        self.exp_link_param.set_value(exp_url)
        self.exp_link_param.save()

        # Create a ParameterName and Parameter of type LINK to a dataset
        self.parametername_dataset_link = ParameterName(
            schema=self.schema, name="dataset_link",
            full_name="This parameter is a dataset LINK",
            data_type=ParameterName.LINK)
        self.parametername_dataset_link.save()

        self.dataset_link_param = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername_dataset_link)
        dataset_url = self.dataset.get_absolute_url()  # /dataset/1/
        self.dataset_link_param.set_value(dataset_url)
        self.dataset_link_param.save()

        # Create a ParameterName type LINK to an unresolvable (non-URL)
        # free-text value
        self.parametername_unresolvable_link = ParameterName(
                schema=self.schema, name="freetext_link",
                full_name="This parameter is a non-URL LINK",
                data_type=ParameterName.LINK)
        self.parametername_unresolvable_link.save()

    def tearDown(self):
        self.exp.delete()
        self.user.delete()
        self.parametername1.delete()
        self.parametername2.delete()
        self.parametername3.delete()
        self.parametername_exp_link.delete()
        self.parametername_dataset_link.delete()
        self.parametername_unresolvable_link.delete()
        self.schema.delete()

    def test_existing_parameterset(self):

        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        self.assertTrue(psm.get_schema().namespace ==
                        "http://localhost/psmtest/df/")

        self.assertTrue(psm.get_param("parameter1").string_value == "test1")

        self.assertTrue(psm.get_param("parameter2", True) == 2)

    def test_new_parameterset(self):

        psm = ParameterSetManager(parentObject=self.datafile,
                                  schema="http://localhost/psmtest/df2/")

        self.assertTrue(psm.get_schema().namespace ==
                        "http://localhost/psmtest/df2/")

        psm.set_param("newparam1", "test3", "New Parameter 1")

        self.assertTrue(psm.get_param("newparam1").string_value ==
                        "test3")

        self.assertTrue(psm.get_param("newparam1").name.full_name ==
                        "New Parameter 1")

        psm.new_param("newparam1", "test4")

        self.assertTrue(len(psm.get_params("newparam1", True)) == 2)

        psm.set_param_list("newparam2", ("a", "b", "c", "d"))

        self.assertTrue(len(psm.get_params("newparam2")) == 4)

        psm.set_params_from_dict(
            {"newparam2": "test5", "newparam3": 3})

        self.assertTrue(psm.get_param("newparam2", True) == "test5")

        # newparam3 gets created and '3' is stored as a string_value,
        # since one cannot assume that an initial numeric value
        # implies a continuing numeric type for this new param
        self.assertTrue(psm.get_param("newparam3").string_value == '3')

        psm.delete_params("newparam1")

        self.assertTrue(len(psm.get_params("newparam1", True)) == 0)
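
    # A sketch of the coercion rule asserted just above, assuming the same
    # psm instance: a value set on a brand-new parameter name is stored as
    # a string, so numeric round-tripping needs an explicit NUMERIC
    # ParameterName (as created for "parameter2" in setUp). "newparam4" is
    # hypothetical:
    #
    #     psm.set_params_from_dict({"newparam4": 7})
    #     assert psm.get_param("newparam4").string_value == "7"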

    def test_link_parameter_type(self):
        """
        Test that Parameter.link_gfk (GenericForeignKey) is correctly
        assigned after using Parameter.set_value(some_url) for a LINK Parameter.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        # Check link to experiment
        exp_url = self.exp.get_absolute_url()  # /experiment/view/1/
        self.assertTrue(psm.get_param("exp_link").string_value ==
                        exp_url)

        self.assertTrue(psm.get_param("exp_link").link_id ==
                        self.exp.id)

        exp_ct = ContentType.objects.get(model__iexact="experiment")
        self.assertTrue(psm.get_param("exp_link").link_ct == exp_ct)

        self.assertTrue(psm.get_param("exp_link").link_gfk == self.exp)

        # Check link to dataset
        dataset_url = self.dataset.get_absolute_url()  # /dataset/1/
        self.assertTrue(psm.get_param("dataset_link").string_value ==
                        dataset_url)

        self.assertTrue(psm.get_param("dataset_link").link_id ==
                        self.dataset.id)

        dataset_ct = ContentType.objects.get(model__iexact="dataset")
        self.assertTrue(psm.get_param("dataset_link").link_ct == dataset_ct)

        self.assertTrue(psm.get_param("dataset_link").link_gfk == self.dataset)

    def test_link_parameter_type_extra(self):
        # make a second ParameterSet for testing some variations
        # in URL values
        self.datafileparameterset2 = DatafileParameterSet(
            schema=self.schema, datafile=self.datafile)
        self.datafileparameterset2.save()

        psm = ParameterSetManager(parameterset=self.datafileparameterset2)

        self.dataset_link_param2 = DatafileParameter(
            parameterset=self.datafileparameterset2,
            name=self.parametername_dataset_link)
        # /dataset/1 - no trailing slash
        dataset_url = self.dataset.get_absolute_url()
        self.dataset_link_param2.set_value(dataset_url)
        self.dataset_link_param2.save()

        # Check link_id/link_ct/link_gfk to dataset
        self.assertTrue(psm.get_param("dataset_link").link_id ==
                        self.dataset.id)

        dataset_ct = ContentType.objects.get(model__iexact="dataset")
        self.assertTrue(psm.get_param("dataset_link").link_ct == dataset_ct)

        self.assertTrue(psm.get_param("dataset_link").link_gfk == self.dataset)

        # Test links of the form /api/v1/experiment/<experiment_id>/
        self.exp_link_param2 = DatafileParameter(
            parameterset=self.datafileparameterset2,
            name=self.parametername_exp_link)
        exp_url = '/api/v1/experiment/%s/' % self.exp.id
        self.exp_link_param2.set_value(exp_url)
        self.exp_link_param2.save()

        # Check link_id/link_ct/link_gfk to experiment
        self.assertTrue(psm.get_param("exp_link").link_id ==
                        self.exp.id)

        exp_ct = ContentType.objects.get(model__iexact="experiment")
        self.assertTrue(psm.get_param("exp_link").link_ct == exp_ct)

        self.assertTrue(psm.get_param("exp_link").link_gfk == self.exp)

    def test_unresolvable_link_parameter(self):
        """
        Test that LINK Parameters that can't be resolved to a model (including
        non-URL values) still work.
        """
        self.datafileparameterset3 = DatafileParameterSet(
                schema=self.schema, datafile=self.datafile)
        self.datafileparameterset3.save()

        psm = ParameterSetManager(parameterset=self.datafileparameterset3)

        # Create a Parameter of type LINK to an unresolvable (non-URL)
        # free-text value
        self.freetext_link_param = DatafileParameter(
                parameterset=self.datafileparameterset3,
                name=self.parametername_unresolvable_link)
        self.assertRaises(SuspiciousOperation,
                          lambda: self.freetext_link_param.set_value(
                              "FREETEXT_ID_123"))

    def test_tz_naive_date_handling(self):
        """
        Ensure that naive dates are handled in a timezone-aware way.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        psm.new_param("parameter3", str(datetime(1970, 01, 01, 10, 0, 0)))

        expect(psm.get_param("parameter3", True))\
            .to_equal(datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc))

    def test_tz_aware_date_handling(self):
        """
        Ensure that timezone-aware dates are handled correctly.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        psm.new_param("parameter3",
                      '1970-01-01T08:00:00+08:00')

        expect(psm.get_param("parameter3", True))\
            .to_equal(datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc))
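
Both timezone tests reduce to the same conversion; a minimal standalone
sketch, assuming the +10:00 local offset that the expected values above
imply:

import pytz
from datetime import datetime

# a naive 10:00 in a +10:00 zone normalises to midnight UTC
aware = datetime(1970, 1, 1, 10, 0, 0, tzinfo=pytz.FixedOffset(600))
assert aware.astimezone(pytz.utc) == datetime(1970, 1, 1, 0, 0, 0,
                                              tzinfo=pytz.utc)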
Exemple #51
0
class ContextualViewTest(TestCase):

    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title='test exp1',
                              institution_name='monash', created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 size=42, filename="foo",
                                 md5sum="junk")
        self.datafile.save()

        self.testschema = Schema(namespace="http://test.com/test/schema",
                                 name="Test View",
                                 type=Schema.DATAFILE,
                                 hidden=True)
        self.testschema.save()
        self.dfps = DatafileParameterSet(datafile=self.datafile,
                                         schema=self.testschema)
        self.dfps.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.datafile.delete()
        self.testschema.delete()
        self.dfps.delete()
        self.acl.delete()

    def testDetailsDisplay(self):
        """
        test display of view for an existing schema and no display for an
        undefined one.
        """
        from tardis.tardis_portal.views import display_datafile_details
        request = flexmock(user=self.user, groups=[("testgroup", flexmock())])
        with self.settings(DATAFILE_VIEWS=[
                ("http://test.com/test/schema", "/test/url"),
                ("http://does.not.exist", "/false/url")]):
            response = display_datafile_details(
                request, datafile_id=self.datafile.id)
            self.assertEqual(response.status_code, 200)
            self.assertTrue("/ajax/parameters/" in response.content)
            self.assertTrue("/test/url" in response.content)
            self.assertFalse("/false/url" in response.content)
Exemple #52
0
class SimpleSearchTest(MyTardisResourceTestCase):
    def setUp(self):
        super(SimpleSearchTest, self).setUp()
        self.out = StringIO()
        call_command('search_index',
                     stdout=self.out,
                     action='delete',
                     force=True)
        call_command('search_index',
                     stdout=self.out,
                     action='rebuild',
                     force=True)
        # add dataset and datafile to experiment
        self.dataset1 = Dataset(description='test_dataset')
        self.dataset1.save()
        self.dataset1.experiments.add(self.testexp)
        self.dataset1.save()
        settings.REQUIRE_DATAFILE_SIZES = False
        settings.REQUIRE_DATAFILE_CHECKSUMS = False
        self.datafile = DataFile(dataset=self.dataset1, filename='test.txt')
        self.datafile.save()

    def test_simple_search_authenticated_user(self):
        response = self.api_client.get(
            '/api/v1/search_simple-search/?query=test',
            authentication=self.get_credentials())
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['objects'][0]['hits']['experiments']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datasets']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datafiles']), 1)

    def test_simple_search_unauthenticated_user(self):
        self.testexp.public_access = 100
        self.testexp.save()
        response = self.api_client.get(
            '/api/v1/search_simple-search/?query=test')
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['objects'][0]['hits']['experiments']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datasets']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datafiles']), 1)

    def test_advance_search_unauthenticated_user(self):
        self.testexp.public_access = 100
        self.testexp.save()
        response = self.api_client.post(
            '/api/v1/search_advance-search/',
            data={
                "text": "test",
                "TypeTag": ["Dataset", "Experiment", "Datafile"]
            })
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['hits']['experiments']), 1)
        self.assertEqual(len(data['hits']['datasets']), 1)
        self.assertEqual(len(data['hits']['datafiles']), 1)

    def test_advance_search_authenticated_user(self):
        response = self.api_client.post(
            '/api/v1/search_advance-search/',
            data={
                "text": "test",
                "TypeTag": ["Dataset", "Experiment", "Datafile"]
            },
            authentication=self.get_credentials())
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['hits']['experiments']), 1)
        self.assertEqual(len(data['hits']['datasets']), 1)
        self.assertEqual(len(data['hits']['datafiles']), 1)
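
Outside the test harness, the same simple-search endpoint can be exercised
over plain HTTP; a sketch assuming a local deployment (BASE is hypothetical,
and authentication headers depend on the deployment):

import requests

BASE = "http://localhost:8000"
resp = requests.get(BASE + "/api/v1/search_simple-search/",
                    params={"query": "test"})
hits = resp.json()["objects"][0]["hits"]  # response shape as asserted above
print(len(hits["experiments"]), len(hits["datasets"]), len(hits["datafiles"]))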