def test_000_update_df_status_offline(self, mock_stat):
    """update_df_status should check the online status of preferred
    DFOs for all previously online datafiles and update the online
    Parameter to 'False' for any offline files."""
    df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
    df1.save()
    dfo1 = DataFileObject(datafile=df1,
                          storage_box=self.sbox1,
                          uri="stream/test.jpg",
                          verified=True)
    dfo1.save()

    schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
    ps = DatafileParameterSet(schema=schema, datafile=df1)
    ps.save()

    param_name = ParameterName.objects.get(schema=schema, name="online")
    param = DatafileParameter(parameterset=ps, name=param_name)
    param.string_value = "True"
    param.save()

    mock_stat.return_value = Stats(st_size=10000, st_blocks=0,
                                   st_mtime=datetime.now())

    update_df_status()

    params = DatafileParameter.objects.filter(
        parameterset__schema=schema, parameterset__datafile=df1)
    self.assertEqual(params.count(), 1)
    self.assertEqual(params[0].string_value, "False")
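These HSM tests construct a `Stats` value to stand in for the result of `os.stat`. A minimal definition consistent with how the tests use it (the name and its placement in the test module are assumptions, not confirmed by the source) would be:

from collections import namedtuple

# Hypothetical stand-in for os.stat_result, matching the fields the
# tests pass: size in bytes, allocated blocks, and modification time.
Stats = namedtuple("Stats", ["st_size", "st_blocks", "st_mtime"])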
def upload(request, dataset_id):
    """
    Uploads a datafile to the store and datafile metadata

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset_id
    :type dataset_id: integer
    :returns: 'True' if successful
    :rtype: :class:`django.http.HttpResponse`
    """
    dataset = Dataset.objects.get(id=dataset_id)

    logger.debug('called upload')
    if request.method == 'POST':
        logger.debug('got POST')
        if request.FILES:
            uploaded_file_post = request.FILES['Filedata']
            logger.debug('done upload')
            datafile = DataFile(dataset=dataset,
                                filename=uploaded_file_post.name,
                                size=uploaded_file_post.size)
            datafile.save(require_checksums=False)
            logger.debug('created file')
            datafile.file_object = uploaded_file_post
            logger.debug('saved datafile')

    return HttpResponse('True')
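A quick way to exercise this view is Django's test client, posting a file under the 'Filedata' key the view expects. The URL path below is an assumption; the actual routing depends on the project's urlconf:

from django.test import Client

# Hedged usage sketch: the "/upload/1/" path assumes a dataset with
# id 1 and a urlconf entry mapping that path to the upload view.
client = Client()
with open("example.jpg", "rb") as fh:
    response = client.post("/upload/1/", {"Filedata": fh})
assert response.content == b"True"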
def fpupload(request, dataset_id):
    """
    Uploads all files picked by filepicker to the dataset

    :param request: a HTTP Request instance
    :type request: :class:`django.http.HttpRequest`
    :param dataset_id: the dataset_id
    :type dataset_id: integer
    :returns: JSON object with a boolean "result" field
    :rtype: :class:`django.http.HttpResponse`
    """
    dataset = Dataset.objects.get(id=dataset_id)
    logger.debug('called fpupload')

    if request.method == 'POST':
        logger.debug('got POST')
        for key, val in request.POST.items():
            splits = val.split(",")
            for url in splits:
                try:
                    fp = FilepickerFile(url)
                except ValueError:
                    pass
                else:
                    picked_file = fp.get_file()
                    datafile = DataFile(dataset=dataset,
                                        filename=picked_file.name,
                                        size=picked_file.size)
                    datafile.save()
                    datafile.file_object = picked_file

    return HttpResponse(json.dumps({"result": True}))
def test_003_update_df_status_skip_offline(self, mock_stat, mock_df_online):
    """update_df_status should skip any files that have previously been
    marked as offline."""
    df2 = DataFile(dataset=self.dataset, filename="test_df2.jpg")
    df2.save()
    dfo2 = DataFileObject(datafile=df2,
                          storage_box=self.sbox1,
                          uri="stream/test_df2.jpg",
                          verified=True)
    dfo2.save()
    # df2.verify()

    schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
    ps2 = DatafileParameterSet(schema=schema, datafile=df2)
    ps2.save()

    param_name = ParameterName.objects.get(schema=schema, name="online")
    param2 = DatafileParameter(parameterset=ps2, name=param_name)
    param2.string_value = "False"
    param2.save()

    mock_stat.return_value = Stats(st_size=10000, st_blocks=100,
                                   st_mtime=datetime.now())

    update_df_status()

    # assert that the df_online method wasn't called
    self.assertEqual(mock_df_online.call_count, 0)
def test_001_create_df_status(self, mock_stat):
    """When a new datafile record is verified, metadata for its
    online/offline status should be created and populated with the
    current online status"""
    mock_stat.return_value = Stats(st_size=10000, st_blocks=100,
                                   st_mtime=datetime.now())

    temp = tempfile.NamedTemporaryFile(dir=tempfile.gettempdir())
    temp_name = os.path.basename(temp.name)
    df2 = DataFile(dataset=self.dataset, filename=temp_name)
    df2.save()
    dfo2 = DataFileObject(datafile=df2,
                          storage_box=self.sbox1,
                          uri=temp_name)
    dfo2.save()
    df2.verify()

    param_name = ParameterName.objects.get(
        schema__namespace=HSM_DATAFILE_NAMESPACE, name="online")
    paramset = DatafileParameterSet.objects.get(
        schema__namespace=HSM_DATAFILE_NAMESPACE, datafile=df2)
    param = DatafileParameter.objects.get(parameterset=paramset,
                                          name=param_name)
    self.assertEqual(param.string_value, "True")
    temp.close()
def test_002_no_duplicate_params(self, mock_stat):
    """Datafile should only ever have one online param"""
    mock_stat.return_value = Stats(st_size=10000, st_blocks=100,
                                   st_mtime=datetime.now())

    df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
    df1.save()
    dfo1 = DataFileObject(datafile=df1,
                          storage_box=self.sbox1,
                          uri="stream/test.jpg",
                          verified=True)
    dfo1.save()
    df1.verify()

    param_name = ParameterName.objects.get(
        schema__namespace=HSM_DATAFILE_NAMESPACE, name="online")
    paramset = DatafileParameterSet.objects.get(
        schema__namespace=HSM_DATAFILE_NAMESPACE, datafile=df1)
    params = DatafileParameter.objects.filter(parameterset=paramset,
                                              name=param_name)
    self.assertEqual(params.count(), 1)

    # Pass the callable and its arguments separately; the original
    # called create_df_status before assertRaises could catch it.
    self.assertRaises(OnlineParamExistsError, create_df_status,
                      df1, HSM_DATAFILE_NAMESPACE, 500)

    params = DatafileParameter.objects.filter(parameterset=paramset,
                                              name=param_name)
    self.assertEqual(params.count(), 1)
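The duplicate guard this test exercises could look like the sketch below. The names mirror the test's imports, but the body is an inference from the test's expectations, not the package's confirmed implementation:

# Hedged sketch of the duplicate-param guard; min_file_size (500 in
# the test call) would presumably feed the online heuristic.
def create_df_status(datafile, namespace, min_file_size):
    schema = Schema.objects.get(namespace=namespace)
    param_name = ParameterName.objects.get(schema=schema, name="online")
    ps, _ = DatafileParameterSet.objects.get_or_create(
        schema=schema, datafile=datafile)
    if DatafileParameter.objects.filter(
            parameterset=ps, name=param_name).exists():
        raise OnlineParamExistsError(
            "DataFile %s already has an online parameter" % datafile.id)
    param = DatafileParameter(parameterset=ps, name=param_name)
    param.string_value = str(df_online(datafile))
    param.save()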
def register_squashfile(exp_id, epn, sq_dir, sq_filename, namespace):
    '''
    example: register_squashfile(456, '1234A', '/srv/squashstore',
                                 '1234A.squashfs',
                                 'http://synchrotron.org.au/mx/squashfsarchive/1')
    '''
    dfs = DataFile.objects.filter(filename=sq_filename,
                                  dataset__experiments__id=exp_id)
    if len(dfs) == 1:
        return dfs[0]

    e = Experiment.objects.get(id=exp_id)
    ds = Dataset(description="01 SquashFS Archive")
    ds.save()
    ds.experiments.add(e)

    filepath = os.path.join(sq_dir, sq_filename)
    try:
        with open(filepath + '.md5sum', 'r') as md5file:
            md5sum = md5file.read().strip()[:32]
    except IOError:
        print('no md5sum file found')
        return None

    size = os.path.getsize(filepath)
    df = DataFile(md5sum=md5sum, filename=sq_filename,
                  size=str(size), dataset=ds)
    df.save()

    schema = Schema.objects.filter(namespace=namespace)[0]
    ps = DatafileParameterSet(schema=schema, datafile=df)
    ps.save()
    ps.set_param('EPN', epn)

    sbox = StorageBox.objects.get(name='squashstore')
    dfo = DataFileObject(storage_box=sbox, datafile=df, uri=sq_filename)
    dfo.save()
    return df
def create_dfo(self, top, filename, dataset=None):
    '''
    create dfo and datafile if necessary
    '''
    df, df_data = self.find_datafile(top, filename)
    if df is None and df_data is None:
        return True  # is a link
    if df:
        if dataset is not None and df.dataset.id != dataset.id:
            # olddataset_id = df.dataset.id
            df.dataset = dataset
            df.save()
            # oldds = Dataset.objects.get(id=olddataset_id)
            # if oldds.datafile_set.count() == 0:
            #     oldds.delete()
        elif dataset is None and top.startswith('frames'):
            prefix = 'Raw data for'
            prefix_dataset(df.dataset, prefix)
            self.update_dataset(df.dataset, top)
    else:
        if dataset is None:
            dataset = self.get_or_create_dataset('lost and found')
        df = DataFile(dataset=dataset, filename=filename,
                      directory=top, **df_data)
        df.save()
    dfo = DataFileObject(datafile=df,
                         storage_box=self.s_box,
                         uri=os.path.join(top, filename))
    dfo.save()
    return True
def _create_test_dataset(nosDatafiles):
    ds_ = Dataset(description='happy snaps of plumage')
    ds_.save()
    for i in range(nosDatafiles):
        df_ = DataFile(dataset=ds_, filename='file_%d' % i,
                       size='21', sha512sum='bogus')
        df_.save()
    ds_.save()
    return ds_
def create_staging_datafile(filepath, username, dataset_id):
    from tardis.tardis_portal.models import DataFile, Dataset
    dataset = Dataset.objects.get(id=dataset_id)

    url, size = get_staging_url_and_size(username, filepath)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(filepath),
                        size=size)
    datafile.save()
    datafile.file_object = open(filepath, 'r')
def setUp(self):
    raise SkipTest  # temporarily disabling this feature, needs coding
    from tempfile import mkdtemp, mktemp
    from django.conf import settings
    from os import path
    import os

    # Disconnect post_save signal
    from django.db.models.signals import post_save
    from tardis.tardis_portal.models import Experiment, \
        staging_hook, Dataset, DataFile, DataFileObject, StorageBox
    post_save.disconnect(staging_hook, sender=DataFileObject)

    from django.contrib.auth.models import User
    user = '******'
    pwd = 'secret'
    email = ''
    self.user = User.objects.create_user(user, email, pwd)

    try:
        os.makedirs(settings.GET_FULL_STAGING_PATH_TEST)
    except OSError:
        pass
    self.temp = mkdtemp(dir=settings.GET_FULL_STAGING_PATH_TEST)

    self.filepath = mktemp(dir=self.temp)
    content = 'test file'
    with open(self.filepath, "w+b") as f:
        f.write(content)

    # make experiment
    exp = Experiment(title='test exp1',
                     institution_name='monash',
                     created_by=self.user)
    exp.save()

    # make dataset
    dataset = Dataset(description="dataset description...")
    dataset.save()
    dataset.experiments.add(exp)
    dataset.save()

    # create datafile
    df = DataFile(dataset=dataset, size=len(content),
                  filename=path.basename(self.filepath),
                  md5sum='f20d9f2072bbeb6691c0f9c5099b01f3')
    df.save()

    # create replica
    base_url = settings.GET_FULL_STAGING_PATH_TEST
    s_box = StorageBox.get_default_storage(location=base_url)
    dfo = DataFileObject(datafile=df,
                         uri=self.filepath,
                         storage_box=s_box)
    dfo.save()
    self.dfo = dfo
def create_staging_datafile(filepath, username, dataset_id):
    init_filters()
    from tardis.tardis_portal.models import DataFile, Dataset
    dataset = Dataset.objects.get(id=dataset_id)

    url, size = get_staging_url_and_size(username, filepath)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(filepath),
                        size=size)
    datafile.save()
    datafile.file_object = open(filepath, 'r')
def _build(dataset, filename, url):
    from tardis.tardis_portal.models import DataFileObject
    datafile = DataFile(dataset=dataset, filename=filename)
    datafile.save()
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=datafile.get_default_storage_box(),
        uri=url)
    dfo.save()
    return datafile
def _build(dataset, filename, url):
    datafile_content = b"\n".join([b'some data %d' % i
                                   for i in range(1000)])
    filesize = len(datafile_content)
    datafile = DataFile(dataset=dataset, filename=filename, size=filesize)
    datafile.save()
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=datafile.get_default_storage_box(),
        uri=url)
    dfo.file_object = BytesIO(datafile_content)
    dfo.save()
    return datafile
def _build(dataset, filename, url):
    datafile_content = u"\n".join([u'some data %d' % i
                                   for i in range(1000)])
    filesize = len(datafile_content)
    datafile = DataFile(dataset=dataset, filename=filename, size=filesize)
    datafile.save()
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=datafile.get_default_storage_box(),
        uri=url)
    dfo.file_object = StringIO(datafile_content)
    dfo.save()
    return datafile
def test_wrong_size_verification(self):
    content = urandom(1024)
    cf = ContentFile(content, 'background_task_testfile')

    # Create new Datafile
    datafile = DataFile(dataset=self.dataset)
    datafile.filename = cf.name
    datafile.size = len(content) - 1
    datafile.sha512sum = hashlib.sha512(content).hexdigest()
    datafile.save()
    datafile.file_object = cf

    # verify explicitly to catch Exceptions hidden by celery
    datafile.verify()
    self.assertFalse(datafile.file_objects.get().verified)
def _build(dataset, filename, url=None):
    from tardis.tardis_portal.models import DataFileObject
    datafile = DataFile(dataset=dataset, filename=filename)
    datafile.save()
    if url is None:
        datafile.file_object = StringIO('bla')
        return datafile
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=datafile.get_default_storage_box(),
        uri=url)
    dfo.save()
    return datafile
def _build_datafile(self, testfile, filename, dataset,
                    checksum=None, size=None, mimetype=''):
    filesize, sha512sum = get_size_and_sha512sum(testfile)
    datafile = DataFile(dataset=dataset, filename=filename,
                        mimetype=mimetype,
                        size=size if size is not None else filesize,
                        sha512sum=(checksum if checksum else sha512sum))
    datafile.save()
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=datafile.get_default_storage_box())
    dfo.save()
    with open(testfile, 'r') as sourcefile:
        dfo.file_object = sourcefile
    return DataFile.objects.get(pk=datafile.pk)
def create_datafile(index):
    testfile = path.join(base_path, 'jeol_sem_test%d.txt' % index)
    size, sha512sum = get_size_and_sha512sum(testfile)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(testfile),
                        size=size,
                        sha512sum=sha512sum)
    datafile.save()
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=s_box,
        uri=path.basename(testfile))
    dfo.save()
    return DataFile.objects.get(pk=datafile.pk)
def test_003_offline_dataset(self, mock_stat):
    """A dataset should be offline if any datafiles are offline"""
    mock_stat.return_value = Stats(st_size=10000, st_blocks=0,
                                   st_mtime=datetime.now())

    ds = Dataset(description="Dataset2", instrument=self.inst)
    ds.save()
    df2 = DataFile(dataset=ds, filename="test_file.jpg")
    df2.save()
    dfo2 = DataFileObject(datafile=df2,
                          storage_box=self.sbox1,
                          uri=df2.filename)
    dfo2.save()
    df2.verify()

    self.assertFalse(dataset_online(ds))
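Consistent with this test's docstring, dataset_online presumably reports a dataset online only when every one of its datafiles is online. A plausible sketch, not the package's confirmed implementation:

# Hedged sketch of dataset_online, inferred from the docstring above;
# the real mytardis_hsm version may also skip unverified files.
def dataset_online(dataset):
    return all(df_online(df) for df in dataset.datafile_set.all())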
def _build(dataset, filename, url=None):
    datafile = DataFile(dataset=dataset, filename=filename)
    datafile.save()
    if url is None:
        datafile.file_object = StringIO(u'bla')
        return datafile
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=datafile.get_default_storage_box(),
        uri=url)
    dfo.save()
    # Tests are run with CELERY_ALWAYS_EAGER = True,
    # so saving a DFO will trigger an immediate attempt
    # to verify the DFO which will trigger an attempt
    # to apply filters because we are overriding the
    # USE_FILTERS setting to True in this test:
    self.assertNotEqual(mock_send_task.call_count, 0)
    return datafile
def _create_datafile():
    user = User.objects.create_user('testuser', '*****@*****.**', 'pwd')
    user.save()
    full_access = Experiment.PUBLIC_ACCESS_FULL
    experiment = Experiment.objects.create(title="IIIF Test",
                                           created_by=user,
                                           public_access=full_access)
    experiment.save()
    ObjectACL(content_object=experiment,
              pluginId='django_user',
              entityId=str(user.id),
              isOwner=True,
              canRead=True,
              canWrite=True,
              canDelete=True,
              aclOwnershipType=ObjectACL.OWNER_OWNED).save()
    dataset = Dataset()
    dataset.save()
    dataset.experiments.add(experiment)
    dataset.save()

    # Create new Datafile
    tempfile = TemporaryUploadedFile('iiif_stored_file', None, None, None)
    with Image(filename='magick:rose') as img:
        img.format = 'tiff'
        img.save(file=tempfile.file)
        tempfile.file.flush()
    datafile = DataFile(dataset=dataset,
                        size=os.path.getsize(tempfile.file.name),
                        filename='iiif_named_file',
                        mimetype='image/tiff')
    compute_md5 = getattr(settings, 'COMPUTE_MD5', True)
    compute_sha512 = getattr(settings, 'COMPUTE_SHA512', True)
    checksums = compute_checksums(open(tempfile.file.name, 'r'),
                                  compute_md5=compute_md5,
                                  compute_sha512=compute_sha512)
    if compute_md5:
        datafile.md5sum = checksums['md5sum']
    if compute_sha512:
        datafile.sha512sum = checksums['sha512sum']
    datafile.save()
    datafile.file_object = tempfile
    return datafile
def create_datafile(index):
    testfile = path.join(base_path, 'middleware_test%d.txt' % index)
    size, sha512sum = get_size_and_sha512sum(testfile)
    datafile = DataFile(dataset=dataset,
                        filename=path.basename(testfile),
                        size=size,
                        sha512sum=sha512sum)
    datafile.save()
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=s_box,
        uri=path.basename(testfile))
    dfo.save()
    if index != 1:
        dfo.verified = False
        dfo.save(update_fields=['verified'])
    return DataFile.objects.get(pk=datafile.pk)
def test_deleting_dfo_without_uri(self):
    dataset = Dataset(description="dataset description")
    dataset.save()
    save1 = settings.REQUIRE_DATAFILE_SIZES
    save2 = settings.REQUIRE_DATAFILE_CHECKSUMS
    try:
        settings.REQUIRE_DATAFILE_SIZES = False
        settings.REQUIRE_DATAFILE_CHECKSUMS = False
        datafile = DataFile(dataset=dataset, filename='test1.txt')
        datafile.save()
    finally:
        settings.REQUIRE_DATAFILE_SIZES = save1
        settings.REQUIRE_DATAFILE_CHECKSUMS = save2
    dfo = DataFileObject(
        datafile=datafile,
        storage_box=datafile.get_default_storage_box(),
        uri=None)
    dfo.save()
    self.assertIsNone(dfo.uri)
    self.assertIsNotNone(dfo.id)
    dfo.delete()
    self.assertIsNone(dfo.id)
def testLocalFile(self):
    content = urandom(1024)
    cf = ContentFile(content, 'background_task_testfile')

    # Create new Datafile
    datafile = DataFile(dataset=self.dataset)
    datafile.filename = cf.name
    datafile.size = len(content)
    datafile.sha512sum = hashlib.sha512(content).hexdigest()
    datafile.save()
    datafile.file_object = cf

    dfo = datafile.file_objects.all()[0]
    # undo auto-verify:
    dfo.verified = False
    dfo.save(update_fields=['verified'])

    # Check that it's not currently verified
    expect(datafile.verified).to_be(False)

    # Check it verifies
    verify_dfos()
    expect(datafile.verified).to_be(True)
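Here verify_dfos re-checks any DataFileObjects still marked unverified. A minimal sketch consistent with how this test uses it (MyTardis's actual version is a task with more filtering and batching) would be:

# Hedged sketch of verify_dfos, inferred from its usage in the test
# above; not the production task.
def verify_dfos():
    for dfo in DataFileObject.objects.filter(verified=False):
        dfo.verify()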
def _create_datafile(): user = User.objects.create_user("testuser", "*****@*****.**", "pwd") user.save() full_access = Experiment.PUBLIC_ACCESS_FULL experiment = Experiment.objects.create(title="IIIF Test", created_by=user, public_access=full_access) experiment.save() ObjectACL( content_object=experiment, pluginId="django_user", entityId=str(user.id), isOwner=True, canRead=True, canWrite=True, canDelete=True, aclOwnershipType=ObjectACL.OWNER_OWNED, ).save() dataset = Dataset() dataset.save() dataset.experiments.add(experiment) dataset.save() # Create new Datafile tempfile = TemporaryUploadedFile("iiif_stored_file", None, None, None) with Image(filename="magick:rose") as img: img.format = "tiff" img.save(file=tempfile.file) tempfile.file.flush() datafile = DataFile( dataset=dataset, size=os.path.getsize(tempfile.file.name), filename="iiif_named_file", mimetype="image/tiff" ) checksums = compute_checksums(open(tempfile.file.name, "r")) datafile.md5sum = checksums["md5sum"] datafile.sha512sum = checksums["sha512sum"] datafile.save() datafile.file_object = tempfile return datafile
def test_002_update_df_status_skip_unverified(self, mock_stat, df_online):
    """update_df_status should skip files that are unverified"""
    df2 = DataFile(dataset=self.dataset, filename="test_df2.jpg")
    df2.save()
    dfo2 = DataFileObject(datafile=df2,
                          storage_box=self.sbox1,
                          uri="stream/test_df2.jpg")
    dfo2.save()

    schema = Schema.objects.get(namespace=HSM_DATAFILE_NAMESPACE)
    ps2 = DatafileParameterSet(schema=schema, datafile=df2)
    ps2.save()

    param_name = ParameterName.objects.get(schema=schema, name="online")
    param2 = DatafileParameter(parameterset=ps2, name=param_name)
    param2.string_value = "True"
    param2.save()

    mock_stat.return_value = Stats(st_size=10000, st_blocks=100,
                                   st_mtime=datetime.now())

    update_df_status()

    df_online.assert_not_called()
class ViewTemplateContextsTest(TestCase):

    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()
        self.datafile = DataFile(dataset=self.dataset, size=42,
                                 filename="foo", md5sum="junk")
        self.datafile.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.datafile.delete()
        self.acl.delete()

    def testExperimentView(self):
        """
        test some template context parameters for an experiment view
        """
        from tardis.tardis_portal.views import ExperimentView
        from django.http import HttpRequest
        import sys

        # Default behavior
        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.method = 'GET'
        request.user = self.user
        request.groups = []
        context = {
            'organization': ['test', 'test2'],
            'default_organization': 'test',
            'default_format': 'tar',
            'protocol': [['tgz', '/download/experiment/1/tgz/'],
                         ['tar', '/download/experiment/1/tar/']]
        }
        views_module.should_call('render_response_index') \
            .with_args(_AnyMatcher(),
                       "tardis_portal/view_experiment.html",
                       _ContextMatcher(context))
        view_fn = ExperimentView.as_view()
        response = view_fn(request, experiment_id=self.exp.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.method = 'GET'
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {
                'organization': ['classic', 'test', 'test2'],
                'default_organization': 'classic',
                'default_format': 'tar',
                'protocol': [['tar', '/download/experiment/1/tar/']]
            }
            views_module.should_call('render_response_index') \
                .with_args(_AnyMatcher(),
                           "tardis_portal/view_experiment.html",
                           _ContextMatcher(context))
            view_fn = ExperimentView.as_view()
            response = view_fn(request, experiment_id=self.exp.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")

    def testDatasetView(self):
        """
        test some context parameters for a dataset view
        """
        from tardis.tardis_portal.views import DatasetView
        from django.http import HttpRequest
        import sys

        views_module = flexmock(sys.modules['tardis.tardis_portal.views'])
        request = HttpRequest()
        request.method = 'GET'
        request.user = self.user
        request.groups = []
        context = {'default_organization': 'test', 'default_format': 'tar'}
        views_module.should_call('render_response_index') \
            .with_args(_AnyMatcher(),
                       "tardis_portal/view_dataset.html",
                       _ContextMatcher(context))
        view_fn = DatasetView.as_view()
        response = view_fn(request, dataset_id=self.dataset.id)
        self.assertEqual(response.status_code, 200)

        # Behavior with USER_AGENT_SENSING enabled and a request.user_agent
        saved_setting = getattr(settings, "USER_AGENT_SENSING", None)
        try:
            setattr(settings, "USER_AGENT_SENSING", True)
            request = HttpRequest()
            request.method = 'GET'
            request.user = self.user
            request.groups = []
            mock_agent = _MiniMock(os=_MiniMock(family="Macintosh"))
            setattr(request, 'user_agent', mock_agent)
            context = {
                'default_organization': 'classic',
                'default_format': 'tar'
            }
            views_module.should_call('render_response_index') \
                .with_args(_AnyMatcher(),
                           "tardis_portal/view_dataset.html",
                           _ContextMatcher(context))
            view_fn = DatasetView.as_view()
            response = view_fn(request, dataset_id=self.dataset.id)
            self.assertEqual(response.status_code, 200)
        finally:
            if saved_setting is not None:
                setattr(settings, "USER_AGENT_SENSING", saved_setting)
            else:
                delattr(settings, "USER_AGENT_SENSING")
def test_parameter(self):
    exp = Experiment(
        title='test exp1',
        institution_name='Australian Synchrotron',
        approved=True,
        created_by=self.user,
        public_access=Experiment.PUBLIC_ACCESS_NONE,
    )
    exp.save()

    dataset = Dataset(description="dataset description")
    dataset.save()
    dataset.experiments.add(exp)
    dataset.save()

    df_file = DataFile(dataset=dataset, filename='file.txt',
                       size=42, md5sum='bogus')
    df_file.save()

    df_schema = Schema(namespace='http://www.cern.ch/felzmann/schema1.xml',
                       type=Schema.DATAFILE)
    df_schema.save()
    ds_schema = Schema(namespace='http://www.cern.ch/felzmann/schema2.xml',
                       type=Schema.DATASET)
    ds_schema.save()
    exp_schema = Schema(namespace='http://www.cern.ch/felzmann/schema3.xml',
                        type=Schema.EXPERIMENT)
    exp_schema.save()

    df_parname = ParameterName(schema=df_schema,
                               name='name',
                               full_name='full_name',
                               units='image/jpg',
                               data_type=ParameterName.FILENAME)
    df_parname.save()
    ds_parname = ParameterName(schema=ds_schema,
                               name='name',
                               full_name='full_name',
                               units='image/jpg',
                               data_type=ParameterName.FILENAME)
    ds_parname.save()
    exp_parname = ParameterName(schema=exp_schema,
                                name='name',
                                full_name='full_name',
                                units='image/jpg',
                                data_type=ParameterName.FILENAME)
    exp_parname.save()

    df_parset = DatafileParameterSet(schema=df_schema, datafile=df_file)
    df_parset.save()
    ds_parset = DatasetParameterSet(schema=ds_schema, dataset=dataset)
    ds_parset.save()
    exp_parset = ExperimentParameterSet(schema=exp_schema, experiment=exp)
    exp_parset.save()

    with self.settings(METADATA_STORE_PATH=os.path.dirname(__file__)):
        filename = 'test.jpg'
        df_parameter = DatafileParameter(name=df_parname,
                                         parameterset=df_parset,
                                         string_value=filename)
        df_parameter.save()
        ds_parameter = DatasetParameter(name=ds_parname,
                                        parameterset=ds_parset,
                                        string_value=filename)
        ds_parameter.save()
        exp_parameter = ExperimentParameter(name=exp_parname,
                                            parameterset=exp_parset,
                                            string_value=filename)
        exp_parameter.save()

        self.assertEqual(
            "<a href='/display/DatafileImage/load/%i/' target='_blank'><img style='width: 300px;' src='/display/DatafileImage/load/%i/' /></a>" %  # noqa
            (df_parameter.id, df_parameter.id),
            df_parameter.get())
        self.assertEqual(
            "<a href='/display/DatasetImage/load/%i/' target='_blank'><img style='width: 300px;' src='/display/DatasetImage/load/%i/' /></a>" %  # noqa
            (ds_parameter.id, ds_parameter.id),
            ds_parameter.get())
        self.assertEqual(
            "<a href='/display/ExperimentImage/load/%i/' target='_blank'><img style='width: 300px;' src='/display/ExperimentImage/load/%i/' /></a>" %  # noqa
            (exp_parameter.id, exp_parameter.id),
            exp_parameter.get())
class ParameterSetManagerTestCase(TestCase):

    def setUp(self):
        from django.contrib.auth.models import User
        from tempfile import mkdtemp

        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)

        self.test_dir = mkdtemp()

        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()

        self.dataset = Dataset(description="dataset description...")
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()

        self.datafile = DataFile(dataset=self.dataset,
                                 filename="testfile.txt",
                                 size="42", md5sum='bogus')
        self.datafile.save()

        self.dfo = DataFileObject(
            datafile=self.datafile,
            storage_box=self.datafile.get_default_storage_box(),
            uri="1/testfile.txt")
        self.dfo.save()

        self.schema = Schema(
            namespace="http://localhost/psmtest/df/",
            name="Parameter Set Manager", type=3)
        self.schema.save()

        self.parametername1 = ParameterName(
            schema=self.schema, name="parameter1",
            full_name="Parameter 1")
        self.parametername1.save()

        self.parametername2 = ParameterName(
            schema=self.schema, name="parameter2",
            full_name="Parameter 2",
            data_type=ParameterName.NUMERIC)
        self.parametername2.save()

        self.parametername3 = ParameterName(
            schema=self.schema, name="parameter3",
            full_name="Parameter 3",
            data_type=ParameterName.DATETIME)
        self.parametername3.save()

        self.datafileparameterset = DatafileParameterSet(
            schema=self.schema, datafile=self.datafile)
        self.datafileparameterset.save()

        self.datafileparameter1 = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername1, string_value="test1")
        self.datafileparameter1.save()

        self.datafileparameter2 = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername2, numerical_value=2)
        self.datafileparameter2.save()

        # Create a ParameterName and Parameter of type LINK to an experiment
        self.parametername_exp_link = ParameterName(
            schema=self.schema, name="exp_link",
            full_name="This parameter is a experiment LINK",
            data_type=ParameterName.LINK)
        self.parametername_exp_link.save()

        self.exp_link_param = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername_exp_link)
        exp_url = self.exp.get_absolute_url()  # /experiment/view/1/
        self.exp_link_param.set_value(exp_url)
        self.exp_link_param.save()

        # Create a ParameterName and Parameter of type LINK to a dataset
        self.parametername_dataset_link = ParameterName(
            schema=self.schema, name="dataset_link",
            full_name="This parameter is a dataset LINK",
            data_type=ParameterName.LINK)
        self.parametername_dataset_link.save()

        self.dataset_link_param = DatafileParameter(
            parameterset=self.datafileparameterset,
            name=self.parametername_dataset_link)
        dataset_url = self.dataset.get_absolute_url()  # /dataset/1/
        self.dataset_link_param.set_value(dataset_url)
        self.dataset_link_param.save()

        # Create a ParameterName type LINK to an unresolvable (non-URL)
        # free-text value
        self.parametername_unresolvable_link = ParameterName(
            schema=self.schema, name="freetext_link",
            full_name="This parameter is a non-URL LINK",
            data_type=ParameterName.LINK)
        self.parametername_unresolvable_link.save()

    def tearDown(self):
        self.exp.delete()
        self.user.delete()
        self.parametername1.delete()
        self.parametername2.delete()
        self.parametername3.delete()
        self.parametername_exp_link.delete()
        self.parametername_dataset_link.delete()
        self.parametername_unresolvable_link.delete()
        self.schema.delete()

    def test_existing_parameterset(self):
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        self.assertTrue(psm.get_schema().namespace ==
                        "http://localhost/psmtest/df/")
        self.assertTrue(psm.get_param("parameter1").string_value == "test1")
        self.assertTrue(psm.get_param("parameter2", True) == 2)

    def test_new_parameterset(self):
        psm = ParameterSetManager(parentObject=self.datafile,
                                  schema="http://localhost/psmtest/df2/")

        self.assertTrue(psm.get_schema().namespace ==
                        "http://localhost/psmtest/df2/")

        psm.set_param("newparam1", "test3", "New Parameter 1")

        self.assertTrue(psm.get_param("newparam1").string_value == "test3")
        self.assertTrue(psm.get_param("newparam1").name.full_name ==
                        "New Parameter 1")

        psm.new_param("newparam1", "test4")

        self.assertTrue(len(psm.get_params("newparam1", True)) == 2)

        psm.set_param_list("newparam2", ("a", "b", "c", "d"))

        self.assertTrue(len(psm.get_params("newparam2")) == 4)

        psm.set_params_from_dict({"newparam2": "test5", "newparam3": 3})

        self.assertTrue(psm.get_param("newparam2", True) == "test5")
        # the newparam3 gets created and '3' is set to a string_value
        # since one cannot assume that an initial numeric value
        # will imply continuing numeric type for this new param
        self.assertTrue(psm.get_param("newparam3").string_value == '3')

        psm.delete_params("newparam1")

        self.assertTrue(len(psm.get_params("newparam1", True)) == 0)

    def test_link_parameter_type(self):
        """
        Test that Parameter.link_gfk (GenericForeignKey) is correctly
        assigned after using Parameter.set_value(some_url) for a LINK
        Parameter.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)

        # Check link to experiment
        exp_url = self.exp.get_absolute_url()  # /experiment/view/1/
        self.assertTrue(psm.get_param("exp_link").string_value == exp_url)
        self.assertTrue(psm.get_param("exp_link").link_id == self.exp.id)
        exp_ct = ContentType.objects.get(model__iexact="experiment")
        self.assertTrue(psm.get_param("exp_link").link_ct == exp_ct)
        self.assertTrue(psm.get_param("exp_link").link_gfk == self.exp)

        # Check link to dataset
        dataset_url = self.dataset.get_absolute_url()  # /dataset/1/
        self.assertTrue(
            psm.get_param("dataset_link").string_value == dataset_url)
        self.assertTrue(
            psm.get_param("dataset_link").link_id == self.dataset.id)
        dataset_ct = ContentType.objects.get(model__iexact="dataset")
        self.assertTrue(psm.get_param("dataset_link").link_ct == dataset_ct)
        self.assertTrue(psm.get_param("dataset_link").link_gfk == self.dataset)

    def test_link_parameter_type_extra(self):
        # make a second ParameterSet for testing some variations
        # in URL values
        self.datafileparameterset2 = DatafileParameterSet(
            schema=self.schema, datafile=self.datafile)
        self.datafileparameterset2.save()

        psm = ParameterSetManager(parameterset=self.datafileparameterset2)

        self.dataset_link_param2 = DatafileParameter(
            parameterset=self.datafileparameterset2,
            name=self.parametername_dataset_link)
        # /dataset/1 - no trailing slash
        dataset_url = self.dataset.get_absolute_url()
        self.dataset_link_param2.set_value(dataset_url)
        self.dataset_link_param2.save()

        # Check link_id/link_ct/link_gfk to dataset
        self.assertTrue(
            psm.get_param("dataset_link").link_id == self.dataset.id)
        dataset_ct = ContentType.objects.get(model__iexact="dataset")
        self.assertTrue(psm.get_param("dataset_link").link_ct == dataset_ct)
        self.assertTrue(psm.get_param("dataset_link").link_gfk == self.dataset)

        # Test links of the form /api/v1/experiment/<experiment_id>/
        self.exp_link_param2 = DatafileParameter(
            parameterset=self.datafileparameterset2,
            name=self.parametername_exp_link)
        exp_url = '/api/v1/experiment/%s/' % self.exp.id
        self.exp_link_param2.set_value(exp_url)
        self.exp_link_param2.save()

        # Check link_id/link_ct/link_gfk to experiment
        self.assertTrue(psm.get_param("exp_link").link_id == self.exp.id)
        exp_ct = ContentType.objects.get(model__iexact="experiment")
        self.assertTrue(psm.get_param("exp_link").link_ct == exp_ct)
        self.assertTrue(psm.get_param("exp_link").link_gfk == self.exp)

    def test_unresolvable_link_parameter(self):
        """
        Test that LINK Parameters that can't be resolved to a model
        (including non-URL values) still work.
        """
        self.datafileparameterset3 = DatafileParameterSet(
            schema=self.schema, datafile=self.datafile)
        self.datafileparameterset3.save()

        psm = ParameterSetManager(parameterset=self.datafileparameterset3)

        # Create a Parameter of type LINK to an unresolvable (non-URL)
        # free-text value
        self.freetext_link_param = DatafileParameter(
            parameterset=self.datafileparameterset3,
            name=self.parametername_unresolvable_link)
        self.assertRaises(
            SuspiciousOperation,
            lambda: self.freetext_link_param.set_value("FREETEXT_ID_123"))

    def test_tz_naive_date_handling(self):
        """
        Ensure that dates are handled in a timezone-aware way.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)
        psm.new_param("parameter3", str(datetime(1970, 1, 1, 10, 0, 0)))
        expect(psm.get_param("parameter3", True))\
            .to_equal(datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc))

    def test_tz_aware_date_handling(self):
        """
        Ensure that dates are handled in a timezone-aware way.
        """
        psm = ParameterSetManager(parameterset=self.datafileparameterset)
        psm.new_param("parameter3", '1970-01-01T08:00:00+08:00')
        expect(psm.get_param("parameter3", True))\
            .to_equal(datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc))
class SimpleSearchTest(MyTardisResourceTestCase):

    def setUp(self):
        super(SimpleSearchTest, self).setUp()
        self.out = StringIO()
        call_command('search_index', stdout=self.out,
                     action='delete', force=True)
        call_command('search_index', stdout=self.out,
                     action='rebuild', force=True)
        # add dataset and datafile to experiment
        self.dataset1 = Dataset(description='test_dataset')
        self.dataset1.save()
        self.dataset1.experiments.add(self.testexp)
        self.dataset1.save()
        settings.REQUIRE_DATAFILE_SIZES = False
        settings.REQUIRE_DATAFILE_CHECKSUMS = False
        self.datafile = DataFile(dataset=self.dataset1, filename='test.txt')
        self.datafile.save()

    def test_simple_search_authenticated_user(self):
        response = self.api_client.get(
            '/api/v1/search_simple-search/?query=test',
            authentication=self.get_credentials())
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['objects'][0]['hits']['experiments']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datasets']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datafiles']), 1)

    def test_simple_search_unauthenticated_user(self):
        self.testexp.public_access = 100
        self.testexp.save()
        response = self.api_client.get(
            '/api/v1/search_simple-search/?query=test')
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['objects'][0]['hits']['experiments']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datasets']), 1)
        self.assertEqual(len(data['objects'][0]['hits']['datafiles']), 1)

    def test_advance_search_unauthenticated_user(self):
        self.testexp.public_access = 100
        self.testexp.save()
        response = self.api_client.post(
            '/api/v1/search_advance-search/',
            data={
                "text": "test",
                "TypeTag": ["Dataset", "Experiment", "Datafile"]
            })
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['hits']['experiments']), 1)
        self.assertEqual(len(data['hits']['datasets']), 1)
        self.assertEqual(len(data['hits']['datafiles']), 1)

    def test_advance_search_authenticated_user(self):
        response = self.api_client.post(
            '/api/v1/search_advance-search/',
            data={
                "text": "test",
                "TypeTag": ["Dataset", "Experiment", "Datafile"]
            },
            authentication=self.get_credentials())
        data = json.loads(response.content.decode())
        self.assertEqual(len(data['hits']['experiments']), 1)
        self.assertEqual(len(data['hits']['datasets']), 1)
        self.assertEqual(len(data['hits']['datafiles']), 1)
class TestMytardisHSM(TestCase):
    """Tests for `mytardis_hsm` package."""

    def setUp(self):
        """Setup test fixtures if needed."""
        self.user = User.objects.create_user("doctor", '', "pwd")
        self.exp = Experiment(title="Wonderful",
                              institution_name="Monash University",
                              created_by=self.user)
        self.exp.save()

        group = Group(name="Group1")
        group.save()

        facility = Facility(name="Test Facility", manager_group=group)
        facility.save()

        self.inst = Instrument(name="Test Instrument1", facility=facility)
        self.inst.save()

        self.dataset = Dataset(description="Dataset1", instrument=self.inst)
        self.dataset.save()

        storage_classes = getattr(settings, "HSM_STORAGE_CLASSES",
                                  DEFAULT_HSM_CLASSES)
        self.sbox1 = StorageBox(name="SBOX1",
                                django_storage_class=storage_classes[0],
                                status='online', max_size=256)
        self.sbox1.save()
        sbox1_attr = StorageBoxAttribute(storage_box=self.sbox1,
                                         key='type', value=StorageBox.DISK)
        sbox1_attr.save()
        sbox1_loc_opt = StorageBoxOption(storage_box=self.sbox1,
                                         key="location",
                                         value="/dummy/path")
        sbox1_loc_opt.save()

        self.sbox2 = StorageBox(
            name="SBOX2",
            django_storage_class="any.non.disk.StorageSystem",
            status='offline', max_size=256)
        self.sbox2.save()
        sbox2_attr = StorageBoxAttribute(storage_box=self.sbox2,
                                         key='type', value=StorageBox.TAPE)
        sbox2_attr.save()

        self.df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        self.df1.save()
        self.dfo1 = DataFileObject(datafile=self.df1,
                                   storage_box=self.sbox1,
                                   uri="stream/test.jpg",
                                   verified=True)
        self.dfo1.save()
        self.df1.verify()

    def tearDown(self):
        """Tear down test fixtures, if any."""

    def test_000_number_of_users(self):
        """Checks the number of users in the database"""
        user = User.objects.all().count()
        self.assertEqual(user, 1)

    def test_001_number_of_experiments(self):
        """Creates an experiment, saves it and then checks that the
        number of experiments in the database is equal to 1.
        """
        exps = Experiment.objects.all()
        self.assertEqual(exps.count(), 1)

    def test_003_number_of_datasets(self):
        """Checks that the number of datasets is equal to 1"""
        self.assertEqual(Dataset.objects.all().count(), 1)

    @mock.patch("os.stat")
    def test_003_dfo_online(self, mock_stat):
        """dfo_online should return True when a DFO's underlying file
        has > 0 blocks"""
        mock_stat.return_value = Stats(st_size=10000, st_blocks=100,
                                       st_mtime=datetime.now())
        self.assertTrue(dfo_online(self.dfo1))

    @mock.patch("os.stat")
    def test_004_dfo_offline(self, mock_stat):
        """dfo_online should return False when a DFO's underlying file
        is > 350 bytes and has 0 blocks"""
        mock_stat.return_value = Stats(st_size=10000, st_blocks=0,
                                       st_mtime=datetime.now())
        self.assertFalse(dfo_online(self.dfo1))

    def test_005_dfo_non_disk(self):
        """Files in StorageBoxes with a django_storage_class other than
        those specified in settings should not be processed"""
        dfo2 = DataFileObject(datafile=self.df1,
                              storage_box=self.sbox2,
                              uri="stream/test.jpg",
                              verified=True)
        self.assertRaises(StorageClassNotSupportedError, dfo_online, dfo2)

        with self.settings(HSM_STORAGE_CLASSES=["random.storage.CLASS"]):
            self.assertRaises(StorageClassNotSupportedError,
                              dfo_online, self.dfo1)

    def test_006_hsm_schema(self):
        """HSM schema should be installed"""
        schemas = Schema.objects\
            .filter(namespace="http://tardis.edu.au/schemas/hsm/datafile/1")\
            .count()
        self.assertEqual(schemas, 1)

    def test_007_dfo_unverified(self):
        """df_online and dfo_online should raise an Exception for an
        unverified DataFile or DataFileObject, respectively"""
        df2 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        df2.save()
        self.assertRaises(DataFileNotVerified, df_online, df2)

        dfo2 = DataFileObject(datafile=df2,
                              storage_box=self.sbox1,
                              uri="stream/test.jpg",
                              verified=False)
        dfo2.save()
        self.assertRaises(DataFileObjectNotVerified, dfo_online, dfo2)
class MyTardisHSMUtilsTestCase(TestCase):
    """Test cases for the MyTardisHSM utils module"""

    def setUp(self):
        """Setup test fixtures if needed."""
        self.user = User.objects.create_user("doctor", '', "pwd")
        self.exp = Experiment(title="Wonderful",
                              institution_name="Monash University",
                              created_by=self.user)
        self.exp.save()

        group = Group(name="Group1")
        group.save()

        facility = Facility(name="Test Facility", manager_group=group)
        facility.save()

        inst = Instrument(name="Test Instrument1", facility=facility)
        inst.save()

        self.dataset = Dataset(description="Dataset1", instrument=inst)
        self.dataset.save()

        storage_classes = getattr(settings, "HSM_STORAGE_CLASSES",
                                  DEFAULT_HSM_CLASSES)
        self.sbox1 = StorageBox(name="SBOX1",
                                django_storage_class=storage_classes[0],
                                status='online', max_size=256)
        self.sbox1.save()
        sbox1_attr = StorageBoxAttribute(storage_box=self.sbox1,
                                         key='type', value=StorageBox.DISK)
        sbox1_attr.save()
        sbox1_loc_opt = StorageBoxOption(storage_box=self.sbox1,
                                         key="location",
                                         value="/dummy/path")
        sbox1_loc_opt.save()

        self.sbox2 = StorageBox(
            name="SBOX2",
            django_storage_class="any.non.disk.StorageSystem",
            status='offline', max_size=256)
        self.sbox2.save()
        sbox2_attr = StorageBoxAttribute(storage_box=self.sbox2,
                                         key='type', value=StorageBox.TAPE)
        sbox2_attr.save()

        self.df1 = DataFile(dataset=self.dataset, filename="test_df.jpg")
        self.df1.save()
        self.dfo1 = DataFileObject(datafile=self.df1,
                                   storage_box=self.sbox1,
                                   uri="stream/test.jpg")
        self.dfo1.save()

    def tearDown(self):
        """Remove stuff"""

    def test_000_lock_datafile(self):
        """We should be able to lock a datafile to prevent
        concurrent access"""
        with DatafileLock(self.df1, "dummy_oid1") as lock1:
            if lock1:
                with DatafileLock(self.df1, "dummy_oid2") as lock2:
                    self.assertTrue(lock1)
                    self.assertFalse(lock2)

    def test_001_datafile_lock_expiry(self):
        """A datafile lock should not release until the expiry has
        been reached"""
        with DatafileLock(self.df1, "dummy_oid1", expires=2) as lock1:
            self.assertTrue(lock1)

        # If we retry the lock right away, acquisition should fail
        # because the expiry hasn't been reached
        with DatafileLock(self.df1, "dummy_oid1_1") as lock1:
            self.assertFalse(lock1)

        # wait 2s for the lock to release
        time.sleep(2)

        # If we retry acquiring the lock now it should succeed
        with DatafileLock(self.df1, "dummy_oid1_2") as lock1:
            self.assertTrue(lock1)
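The lock semantics these tests rely on (entering the context yields a boolean, a timeout releases the lock) map naturally onto Django's cache framework. The sketch below uses cache.add, which sets a key only if it is absent; it is an illustration consistent with the tests, not the package's confirmed implementation:

from django.core.cache import cache

# Hedged sketch of a cache-backed DatafileLock matching the usage in
# the tests above; the actual mytardis_hsm class may differ.
class DatafileLock(object):
    def __init__(self, datafile, oid, expires=60):
        self.key = "datafile_lock_%d" % datafile.id
        self.oid = oid
        self.expires = expires

    def __enter__(self):
        # cache.add returns True only if the key was not already set,
        # giving an atomic acquire on memcached-style backends.
        return cache.add(self.key, self.oid, self.expires)

    def __exit__(self, exc_type, exc_value, traceback):
        # Let the key expire rather than deleting it, matching the
        # expiry behaviour exercised in test_001_datafile_lock_expiry.
        return False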
class IndexExperimentTestCase(TestCase):

    def setUp(self):
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.out = StringIO()
        call_command('search_index', stdout=self.out,
                     action='delete', force=True)

    def test_create_index(self):
        self.exp1 = Experiment(title='test exp1',
                               institution_name='monash',
                               description='Test Description',
                               created_by=self.user)
        self.exp2 = Experiment(title='test exp2',
                               institution_name='monash',
                               description='Test Description',
                               created_by=self.user)
        self.exp1.save()
        self.exp2.save()

        # get search instance
        search = ExperimentDocument.search()
        # query for title (exact matching)
        query = search.query("match", title='test exp1')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].title, 'test exp1')
        # query for description
        query = search.query("match", description='Test Description')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].description, 'Test Description')
        # query for created_time
        query = search.query("match", created_time=self.exp1.created_time)
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].created_time, self.exp1.created_time)

        # dataset1 belongs to experiment1
        self.dataset1 = Dataset(description='test_dataset')
        self.dataset1.save()
        self.dataset1.experiments.add(self.exp1)
        self.dataset1.save()
        # dataset2 belongs to experiment2
        self.dataset2 = Dataset(description='test_dataset2')
        self.dataset2.save()
        self.dataset2.experiments.add(self.exp2)
        self.dataset2.save()

        # search on dataset
        search = DatasetDocument.search()
        query = search.query("match", description='test_dataset')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits.total.value, 1)

        # search on datafile
        settings.REQUIRE_DATAFILE_SIZES = False
        settings.REQUIRE_DATAFILE_CHECKSUMS = False
        self.datafile = DataFile(dataset=self.dataset1, filename='test.txt')
        self.datafile.save()
        search = DataFileDocument.search()
        query = search.query("match", filename='test.txt')
        result = query.execute(ignore_cache=True)
        self.assertEqual(result.hits[0].filename, self.datafile.filename)
class ContextualViewTest(TestCase):

    def setUp(self):
        """
        setting up essential objects, copied from tests above
        """
        user = '******'
        pwd = 'secret'
        email = ''
        self.user = User.objects.create_user(user, email, pwd)
        self.userProfile = self.user.userprofile
        self.exp = Experiment(title='test exp1',
                              institution_name='monash',
                              created_by=self.user)
        self.exp.save()
        self.acl = ObjectACL(
            pluginId=django_user,
            entityId=str(self.user.id),
            content_object=self.exp,
            canRead=True,
            isOwner=True,
            aclOwnershipType=ObjectACL.OWNER_OWNED,
        )
        self.acl.save()
        self.dataset = Dataset(description='dataset description...')
        self.dataset.save()
        self.dataset.experiments.add(self.exp)
        self.dataset.save()
        self.datafile = DataFile(dataset=self.dataset, size=42,
                                 filename="foo", md5sum="junk")
        self.datafile.save()
        self.testschema = Schema(namespace="http://test.com/test/schema",
                                 name="Test View",
                                 type=Schema.DATAFILE,
                                 hidden=True)
        self.testschema.save()
        self.dfps = DatafileParameterSet(datafile=self.datafile,
                                         schema=self.testschema)
        self.dfps.save()

    def tearDown(self):
        self.user.delete()
        self.exp.delete()
        self.dataset.delete()
        self.datafile.delete()
        self.testschema.delete()
        self.dfps.delete()
        self.acl.delete()

    def testDetailsDisplay(self):
        """
        test display of view for an existing schema and no display
        for an undefined one.
        """
        from tardis.tardis_portal.views import display_datafile_details
        request = flexmock(user=self.user,
                           groups=[("testgroup", flexmock())])
        with self.settings(DATAFILE_VIEWS=[
                ("http://test.com/test/schema", "/test/url"),
                ("http://does.not.exist", "/false/url")]):
            response = display_datafile_details(
                request, datafile_id=self.datafile.id)
            self.assertEqual(response.status_code, 200)
            self.assertTrue("/ajax/parameters/" in response.content)
            self.assertTrue("/test/url" in response.content)
            self.assertFalse("/false/url" in response.content)