def stage_files_to_dataset(request, dataset_id):
    """
    Takes a JSON list of filenames to import from the staging area to
    this dataset.

    :param request: POST request whose body is a JSON list of filenames
        and whose Content-Type is ``application/json``
    :param dataset_id: primary key of the target dataset
    :return: 201 with ``{"email": <user email>}`` on success; 403 when
        the user lacks write access, 405 for non-POST methods, 400 for a
        missing/wrong Content-Type or malformed JSON
    """
    if not has_dataset_write(request, dataset_id):
        return HttpResponseForbidden()

    if request.method != 'POST':
        # This method only accepts POSTS, so send 405 Method Not Allowed
        response = HttpResponse(status=405)
        response['Allow'] = 'POST'
        return response

    user = request.user

    # Incoming data MUST be JSON.  Use .get() so a request that omits
    # the Content-Type header yields a 400 instead of a KeyError/500.
    if not request.META.get('CONTENT_TYPE', '').startswith('application/json'):
        return HttpResponse(status=400)

    try:
        files = json.loads(request.body)
    except ValueError:
        # Malformed JSON payload.  Was a bare ``except:``, which also
        # swallowed unrelated errors such as KeyboardInterrupt.
        return HttpResponse(status=400)

    # Hand the actual copying off to an async Celery task.
    create_staging_datafiles.delay(files, user.id, dataset_id,
                                   request.is_secure())

    email = {'email': user.email}
    return HttpResponse(json.dumps(email), status=201)
def view_full_dataset(request, dataset_id):
    """Displays a MX Dataset and associated information.

    Shows a full (hundreds of images) dataset its metadata and a list
    of associated files with the option to show metadata of each file
    and ways to download those files.  With write permission this page
    also allows uploading and metadata editing.

    Settings for this view:
    INSTALLED_APPS += ("tardis.apps.mx_views",)
    DATASET_VIEWS = [("http://synchrotron.org.au/views/dataset/full",
                      "tardis.apps.mx_views.views.view_full_dataset"),]
    """
    dataset = Dataset.objects.get(id=dataset_id)

    def get_datafiles_page():
        # pagination was removed by someone in the interface but not here.
        # need to fix.
        pgresults = 100

        paginator = Paginator(dataset.dataset_file_set.all(), pgresults)

        try:
            page = int(request.GET.get('page', '1'))
        except ValueError:
            page = 1

        # If page request (9999) is out of range, deliver last page of
        # results.
        try:
            return paginator.page(page)
        except (EmptyPage, InvalidPage):
            return paginator.page(paginator.num_pages)

    display_images = dataset.get_images()
    image_count = len(display_images)
    if image_count > 4:
        # take 4 evenly spaced images from the set; floor division keeps
        # the slice step an int (``/`` yields a float under Python 3's
        # true division and slicing with a float step raises TypeError)
        display_images = display_images[0::image_count // 4][:4]

    c = Context({
        'dataset': dataset,
        'datafiles': get_datafiles_page(),
        'parametersets': dataset.getParameterSets()
                                .exclude(schema__hidden=True),
        'has_download_permissions':
            authz.has_dataset_download_access(request, dataset_id),
        'has_write_permissions':
            authz.has_dataset_write(request, dataset_id),
        'from_experiment':
            get_experiment_referer(request, dataset_id),
        'other_experiments':
            authz.get_accessible_experiments_for_dataset(request, dataset_id),
        'display_images': display_images,
    })
    return HttpResponse(render_response_index(
        request, 'mx_views/view_full_dataset.html', c))
def view_full_dataset(request, dataset_id):
    """Displays a HRMC Dataset as a single scatter plot of x,y values
    from grfinalXX.dat and gerr.dat files

    Requires BDPMytardis with single

    Settings for this view:
    INSTALLED_APPS += ("tardis.apps.hrmc_views",)
    DATASET_VIEWS = [("http://rmit.edu.au/schemas/hrmcdataset",
                      "tardis.apps.hrmc_views.views.view_full_dataset"),]
    """
    logger.debug("got to hrmc views")
    # NOTE(review): an unknown id raises Dataset.DoesNotExist (500 to the
    # client) and no access check happens here -- confirm permissions are
    # enforced by the caller/URL dispatch.
    dataset = Dataset.objects.get(id=dataset_id)

    # FIXME: as single image, can remove this
    def get_datafiles_page():
        # pagination was removed by someone in the interface but not here.
        # need to fix.
        pgresults = 100

        paginator = Paginator(dataset.dataset_file_set.all(), pgresults)

        try:
            page = int(request.GET.get('page', '1'))
        except ValueError:
            page = 1

        # If page request (9999) is out of range, deliver last page of
        # results.
        try:
            return paginator.page(page)
        except (EmptyPage, InvalidPage):
            return paginator.page(paginator.num_pages)

    # HRMC datasets show at most one image.
    display_images = []
    image_to_show = get_image_to_show(dataset)
    if image_to_show:
        display_images.append(image_to_show)

    c = Context({
        'dataset': dataset,
        'datafiles': get_datafiles_page(),
        'parametersets': dataset.getParameterSets()
                                .exclude(schema__hidden=True),
        'has_download_permissions':
            authz.has_dataset_download_access(request, dataset_id),
        'has_write_permissions':
            authz.has_dataset_write(request, dataset_id),
        'from_experiment':
            get_experiment_referer(request, dataset_id),
        'other_experiments':
            authz.get_accessible_experiments_for_dataset(request, dataset_id),
        'display_images': display_images,
    })
    return HttpResponse(
        render_response_index(request, 'hrmc_views/view_full_dataset.html', c))
def add_datafile_par(request, datafile_id):
    """Add a parameter set to a datafile.

    Requires write access to the datafile's dataset; returns the
    standard error response otherwise.
    """
    datafile = DataFile.objects.get(id=datafile_id)
    if not authz.has_dataset_write(request, datafile.dataset.id):
        return return_response_error(request)
    return add_par(request, datafile,
                   otype="datafile", stype=Schema.DATAFILE)
def view_full_dataset(request, dataset_id):
    """Displays a HRMC Dataset as a single scatter plot of x,y values
    from grfinalXX.dat and gerr.dat files

    Requires BDPMytardis with single

    Settings for this view:
    INSTALLED_APPS += ("tardis.apps.hrmc_views",)
    DATASET_VIEWS = [("http://rmit.edu.au/schemas/hrmcdataset",
                      "tardis.apps.hrmc_views.views.view_full_dataset"),]
    """
    logger.debug("got to hrmc views")
    dataset = Dataset.objects.get(id=dataset_id)

    # FIXME: as single image, can remove this
    def get_datafiles_page():
        # pagination was removed by someone in the interface but not here.
        # need to fix.
        pgresults = 100

        paginator = Paginator(dataset.dataset_file_set.all(), pgresults)

        try:
            # Non-numeric ``page`` values fall back to the first page.
            page = int(request.GET.get('page', '1'))
        except ValueError:
            page = 1

        # If page request (9999) is out of range, deliver last page of
        # results.
        try:
            return paginator.page(page)
        except (EmptyPage, InvalidPage):
            return paginator.page(paginator.num_pages)

    # At most a single image is passed to the template.
    display_images = []
    image_to_show = get_image_to_show(dataset)
    if image_to_show:
        display_images.append(image_to_show)

    c = Context({
        'dataset': dataset,
        'datafiles': get_datafiles_page(),
        'parametersets': dataset.getParameterSets()
                                .exclude(schema__hidden=True),
        'has_download_permissions':
            authz.has_dataset_download_access(request, dataset_id),
        'has_write_permissions':
            authz.has_dataset_write(request, dataset_id),
        'from_experiment':
            get_experiment_referer(request, dataset_id),
        'other_experiments':
            authz.get_accessible_experiments_for_dataset(request, dataset_id),
        'display_images': display_images,
    })
    return HttpResponse(render_response_index(
        request, 'hrmc_views/view_full_dataset.html', c))
def retrieve_dataset_metadata(request, dataset_id):
    """Render the metadata panel (non-hidden parameter sets) for a
    dataset, flagging whether the user may edit it."""
    dataset = Dataset.objects.get(pk=dataset_id)
    writable = authz.has_dataset_write(request, dataset_id)
    visible_parametersets = \
        dataset.datasetparameterset_set.exclude(schema__hidden=True)

    context = {
        'dataset': dataset,
        'parametersets': visible_parametersets,
        'has_write_permissions': writable,
    }
    return HttpResponse(render_response_index(
        request, 'tardis_portal/ajax/dataset_metadata.html', context))
def retrieve_dataset_metadata(request, dataset_id):
    """Render the metadata (parameter set) panel for a dataset.

    Parameter sets whose schema is hidden are excluded; the caller's
    write permission is passed to the template so it can toggle edit
    controls.
    """
    dataset = Dataset.objects.get(pk=dataset_id)
    has_write_permissions = authz.has_dataset_write(request, dataset_id)
    parametersets = dataset.datasetparameterset_set.exclude(
        schema__hidden=True)

    c = {
        'dataset': dataset,
        'parametersets': parametersets,
        'has_write_permissions': has_write_permissions
    }
    return HttpResponse(
        render_response_index(request,
                              'tardis_portal/ajax/dataset_metadata.html', c))
def retrieve_parameters(request, datafile_id):
    """Render the parameter sets attached to a datafile.

    Parameter sets with a hidden schema are excluded.  The user's
    write and download permissions on the owning dataset are passed to
    the template so it can show/hide edit and download controls.
    """
    parametersets = DatafileParameterSet.objects.all()
    parametersets = parametersets.filter(datafile__pk=datafile_id)\
                                 .exclude(schema__hidden=True)

    # Raises DataFile.DoesNotExist for an unknown id.
    datafile = DataFile.objects.get(id=datafile_id)
    dataset_id = datafile.dataset.id
    has_write_permissions = authz.has_dataset_write(request, dataset_id)

    c = {'parametersets': parametersets,
         'datafile': datafile,
         'has_write_permissions': has_write_permissions,
         'has_download_permissions':
             authz.has_dataset_download_access(request, dataset_id)}

    return HttpResponse(render_response_index(
        request, 'tardis_portal/ajax/parameters.html', c))
def edit_dataset(request, dataset_id):
    """Edit a dataset's description via DatasetForm.

    GET renders the form pre-populated from the dataset.  A valid POST
    saves the new description and redirects (303) to the dataset view;
    an invalid POST falls through and re-renders the bound form with
    its validation errors.  Requires write access (403 otherwise).
    """
    if not has_dataset_write(request, dataset_id):
        return HttpResponseForbidden()
    dataset = Dataset.objects.get(id=dataset_id)

    # Process form or prepopulate it
    if request.method == 'POST':
        form = DatasetForm(request.POST)
        if form.is_valid():
            dataset.description = form.cleaned_data['description']
            dataset.save()
            return _redirect_303('tardis_portal.view_dataset', dataset.id)
    else:
        form = DatasetForm(instance=dataset)

    c = {'form': form,
         'dataset': dataset}
    return HttpResponse(
        render_response_index(request,
                              'tardis_portal/add_or_edit_dataset.html', c))
def edit_dataset(request, dataset_id):
    """Show and process the dataset-description edit form.

    Write access is required; a valid POST saves and redirects with
    303 to the dataset view, anything else renders the form.
    """
    if not has_dataset_write(request, dataset_id):
        return HttpResponseForbidden()
    dataset = Dataset.objects.get(id=dataset_id)

    if request.method == 'POST':
        # Bound form: persist on success, otherwise fall through and
        # re-render it together with its validation errors.
        form = DatasetForm(request.POST)
        if form.is_valid():
            dataset.description = form.cleaned_data['description']
            dataset.save()
            return _redirect_303('tardis_portal.view_dataset', dataset.id)
    else:
        # Unbound form pre-populated from the existing dataset.
        form = DatasetForm(instance=dataset)

    template_context = {'form': form, 'dataset': dataset}
    return HttpResponse(render_response_index(
        request, 'tardis_portal/add_or_edit_dataset.html', template_context))
def retrieve_parameters(request, datafile_id):
    """Render the non-hidden parameter sets for one datafile, together
    with the caller's write/download permissions on its dataset."""
    visible_sets = DatafileParameterSet.objects.filter(
        datafile__pk=datafile_id).exclude(schema__hidden=True)

    datafile = DataFile.objects.get(id=datafile_id)
    dataset_id = datafile.dataset.id

    context = {
        'parametersets': visible_sets,
        'datafile': datafile,
        'has_write_permissions':
            authz.has_dataset_write(request, dataset_id),
        'has_download_permissions':
            authz.has_dataset_download_access(request, dataset_id),
    }
    return HttpResponse(render_response_index(
        request, 'tardis_portal/ajax/parameters.html', context))
def create_detail(self, object_list, bundle):  # noqa # too complex
    """Return True if the requesting user may create ``bundle.obj``.

    Superusers may create anything; anonymous users nothing.  For each
    supported model type the check combines a Django model permission
    with per-object write access at the experiment or dataset level.

    :raises NotImplementedError: for object types with no rule here.
    """
    if not bundle.request.user.is_authenticated():
        return False
    if bundle.request.user.is_authenticated() and \
            bundle.request.user.is_superuser:
        return True
    # isinstance() instead of ``type(x) == T`` comparisons: idiomatic,
    # accepts subclasses, and matches the newer revisions of this method.
    if isinstance(bundle.obj, Experiment):
        return bundle.request.user.has_perm('tardis_portal.add_experiment')
    elif isinstance(bundle.obj, ExperimentParameterSet):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_experiment'):
            return False
        experiment_uri = bundle.data.get('experiment', None)
        if experiment_uri is not None:
            # Prefer the experiment referenced in the submitted data.
            experiment = ExperimentResource.get_via_uri(
                ExperimentResource(), experiment_uri, bundle.request)
            return has_write_permissions(bundle.request, experiment.id)
        elif getattr(bundle.obj.experiment, 'id', False):
            return has_write_permissions(bundle.request,
                                         bundle.obj.experiment.id)
        return False
    elif isinstance(bundle.obj, ExperimentParameter):
        return bundle.request.user.has_perm(
            'tardis_portal.change_experiment') and \
            has_write_permissions(bundle.request,
                                  bundle.obj.parameterset.experiment.id)
    elif isinstance(bundle.obj, Dataset):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_dataset'):
            return False
        # Write access to every listed experiment is required; an empty
        # list yields False.
        perm = False
        for exp_uri in bundle.data.get('experiments', []):
            try:
                this_exp = ExperimentResource.get_via_uri(
                    ExperimentResource(), exp_uri, bundle.request)
            except Exception:
                # Was a bare ``except:``; narrowed so SystemExit and
                # KeyboardInterrupt still propagate.
                return False
            if has_write_permissions(bundle.request, this_exp.id):
                perm = True
            else:
                return False
        return perm
    elif isinstance(bundle.obj, DatasetParameterSet):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_dataset'):
            return False
        dataset_uri = bundle.data.get('dataset', None)
        if dataset_uri is not None:
            dataset = DatasetResource.get_via_uri(
                DatasetResource(), dataset_uri, bundle.request)
            return has_dataset_write(bundle.request, dataset.id)
        elif getattr(bundle.obj.dataset, 'id', False):
            return has_dataset_write(bundle.request,
                                     bundle.obj.dataset.id)
        return False
    elif isinstance(bundle.obj, DatasetParameter):
        return bundle.request.user.has_perm(
            'tardis_portal.change_dataset') and \
            has_dataset_write(bundle.request,
                              bundle.obj.parameterset.dataset.id)
    elif isinstance(bundle.obj, Dataset_File):
        dataset = DatasetResource.get_via_uri(DatasetResource(),
                                              bundle.data['dataset'],
                                              bundle.request)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_dataset_file'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DatafileParameterSet):
        dataset = Dataset.objects.get(
            pk=bundle.obj.dataset_file.dataset.id)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_dataset_file'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DatafileParameter):
        dataset = Dataset.objects.get(
            pk=bundle.obj.parameterset.dataset_file.dataset.id)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_dataset_file'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, Replica):
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_dataset_file'),
            has_dataset_write(bundle.request,
                              bundle.obj.datafile.dataset.id),
        ])
    raise NotImplementedError(type(bundle.obj))
def create_detail(self, object_list, bundle):  # noqa # too complex
    """Decide whether the requesting user may create ``bundle.obj``.

    Superusers may create anything; anonymous users nothing.  For each
    supported model type the check combines a Django model permission
    with per-object write access at the experiment or dataset level.

    :raises NotImplementedError: for object types with no rule here.
    """
    if not bundle.request.user.is_authenticated():
        return False
    if bundle.request.user.is_authenticated() and \
            bundle.request.user.is_superuser:
        return True
    if isinstance(bundle.obj, Experiment):
        return bundle.request.user.has_perm('tardis_portal.add_experiment')
    elif isinstance(bundle.obj, ExperimentParameterSet):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_experiment'):
            return False
        experiment_uri = bundle.data.get('experiment', None)
        if experiment_uri is not None:
            # Prefer the experiment referenced in the submitted data over
            # the one already attached to the object.
            experiment = ExperimentResource.get_via_uri(
                ExperimentResource(), experiment_uri, bundle.request)
            return has_write_permissions(bundle.request, experiment.id)
        elif getattr(bundle.obj.experiment, 'id', False):
            return has_write_permissions(bundle.request,
                                         bundle.obj.experiment.id)
        return False
    elif isinstance(bundle.obj, ExperimentParameter):
        return bundle.request.user.has_perm(
            'tardis_portal.change_experiment') and \
            has_write_permissions(bundle.request,
                                  bundle.obj.parameterset.experiment.id)
    elif isinstance(bundle.obj, Dataset):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_dataset'):
            return False
        # Write access to every experiment listed in the payload is
        # required; an empty list therefore yields False.
        perm = False
        for exp_uri in bundle.data.get('experiments', []):
            try:
                this_exp = ExperimentResource.get_via_uri(
                    ExperimentResource(), exp_uri, bundle.request)
            except:
                # NOTE(review): bare except also swallows SystemExit /
                # KeyboardInterrupt -- consider narrowing to Exception.
                return False
            if has_write_permissions(bundle.request, this_exp.id):
                perm = True
            else:
                return False
        return perm
    elif isinstance(bundle.obj, DatasetParameterSet):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_dataset'):
            return False
        dataset_uri = bundle.data.get('dataset', None)
        if dataset_uri is not None:
            dataset = DatasetResource.get_via_uri(
                DatasetResource(), dataset_uri, bundle.request)
            return has_dataset_write(bundle.request, dataset.id)
        elif getattr(bundle.obj.dataset, 'id', False):
            return has_dataset_write(bundle.request,
                                     bundle.obj.dataset.id)
        return False
    elif isinstance(bundle.obj, DatasetParameter):
        return bundle.request.user.has_perm(
            'tardis_portal.change_dataset') and \
            has_dataset_write(bundle.request,
                              bundle.obj.parameterset.dataset.id)
    elif isinstance(bundle.obj, DataFile):
        dataset = DatasetResource.get_via_uri(DatasetResource(),
                                              bundle.data['dataset'],
                                              bundle.request)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DatafileParameterSet):
        dataset = Dataset.objects.get(
            pk=bundle.obj.datafile.dataset.id)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DatafileParameter):
        dataset = Dataset.objects.get(
            pk=bundle.obj.parameterset.datafile.dataset.id)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DataFileObject):
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request,
                              bundle.obj.datafile.dataset.id),
        ])
    elif isinstance(bundle.obj, ObjectACL):
        return bundle.request.user.has_perm('tardis_portal.add_objectacl')
    elif isinstance(bundle.obj, Group):
        return bundle.request.user.has_perm('tardis_portal.add_group')
    elif isinstance(bundle.obj, Facility):
        return bundle.request.user.has_perm('tardis_portal.add_facility')
    elif isinstance(bundle.obj, Instrument):
        # Creating an instrument additionally requires the user to
        # manage the facility the instrument belongs to.
        facilities = facilities_managed_by(bundle.request.user)
        return all([
            bundle.request.user.has_perm('tardis_portal.add_instrument'),
            bundle.obj.facility in facilities
        ])
    raise NotImplementedError(type(bundle.obj))
def view_full_dataset(request, dataset_id):
    """Displays a MX Dataset and associated information.

    Shows a full (hundreds of images) dataset its metadata and a list
    of associated files with the option to show metadata of each file
    and ways to download those files.  With write permission this page
    also allows uploading and metadata editing.

    Settings for this view:
    INSTALLED_APPS += ("tardis.apps.mx_views",)
    DATASET_VIEWS = [("http://synchrotron.org.au/views/dataset/full",
                      "tardis.apps.mx_views.views.view_full_dataset"),]
    """
    dataset = Dataset.objects.get(id=dataset_id)

    def get_datafiles_page():
        # pagination was removed by someone in the interface but not here.
        # need to fix.
        pgresults = 100

        paginator = Paginator(dataset.datafile_set.all(), pgresults)

        try:
            page = int(request.GET.get('page', '1'))
        except ValueError:
            page = 1

        # If page request (9999) is out of range, deliver last page of
        # results.
        try:
            return paginator.page(page)
        except (EmptyPage, InvalidPage):
            return paginator.page(paginator.num_pages)

    display_images = dataset.get_images()
    image_count = len(display_images)
    if image_count > 4:
        # take 4 evenly spaced images from the set; floor division keeps
        # the slice step an int (``/`` yields a float under Python 3's
        # true division and slicing with a float step raises TypeError)
        display_images = display_images[0::image_count // 4][:4]

    upload_method = getattr(settings, "UPLOAD_METHOD", "uploadify")

    c = {
        'dataset': dataset,
        'datafiles': get_datafiles_page(),
        'parametersets': dataset.getParameterSets()
                                .exclude(schema__hidden=True),
        'has_download_permissions':
            authz.has_dataset_download_access(request, dataset_id),
        'has_write_permissions':
            authz.has_dataset_write(request, dataset_id),
        'from_experiment':
            get_experiment_referer(request, dataset_id),
        'other_experiments':
            authz.get_accessible_experiments_for_dataset(request, dataset_id),
        'display_images': display_images,
        'upload_method': upload_method,
        'default_organization':
            getattr(settings, 'DEFAULT_ARCHIVE_ORGANIZATION', 'classic'),
        'default_format':
            getattr(settings, 'DEFAULT_ARCHIVE_FORMATS', ['tgz', 'tar'])[0]
    }
    return HttpResponse(
        render_response_index(request, 'mx_views/view_full_dataset.html', c))
def create_detail(self, object_list, bundle):  # noqa # too complex
    """Decide whether the requesting user may create ``bundle.obj``.

    Superusers may create anything; anonymous users nothing.  For each
    supported model type the check combines a Django model permission
    with per-object write access at the experiment or dataset level.

    :raises NotImplementedError: for object types with no rule here.
    """
    if not bundle.request.user.is_authenticated():
        return False
    if bundle.request.user.is_authenticated() and \
            bundle.request.user.is_superuser:
        return True
    if isinstance(bundle.obj, Experiment):
        return bundle.request.user.has_perm('tardis_portal.add_experiment')
    elif isinstance(bundle.obj, ExperimentParameterSet):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_experiment'):
            return False
        experiment_uri = bundle.data.get('experiment', None)
        if experiment_uri is not None:
            # Prefer the experiment referenced in the submitted data.
            experiment = ExperimentResource.get_via_uri(
                ExperimentResource(), experiment_uri, bundle.request)
            return has_write_permissions(bundle.request, experiment.id)
        elif getattr(bundle.obj.experiment, 'id', False):
            return has_write_permissions(bundle.request,
                                         bundle.obj.experiment.id)
        return False
    elif isinstance(bundle.obj, ExperimentParameter):
        return bundle.request.user.has_perm(
            'tardis_portal.change_experiment') and \
            has_write_permissions(bundle.request,
                                  bundle.obj.parameterset.experiment.id)
    elif isinstance(bundle.obj, Dataset):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_dataset'):
            return False
        # Write access to every listed experiment is required; an empty
        # list yields False.
        perm = False
        for exp_uri in bundle.data.get('experiments', []):
            try:
                this_exp = ExperimentResource.get_via_uri(
                    ExperimentResource(), exp_uri, bundle.request)
            except Exception:
                # Was a bare ``except:``; narrowed so SystemExit and
                # KeyboardInterrupt still propagate.
                return False
            if has_write_permissions(bundle.request, this_exp.id):
                perm = True
            else:
                return False
        return perm
    elif isinstance(bundle.obj, DatasetParameterSet):
        if not bundle.request.user.has_perm(
                'tardis_portal.change_dataset'):
            return False
        dataset_uri = bundle.data.get('dataset', None)
        if dataset_uri is not None:
            dataset = DatasetResource.get_via_uri(DatasetResource(),
                                                  dataset_uri,
                                                  bundle.request)
            return has_dataset_write(bundle.request, dataset.id)
        elif getattr(bundle.obj.dataset, 'id', False):
            return has_dataset_write(bundle.request,
                                     bundle.obj.dataset.id)
        return False
    elif isinstance(bundle.obj, DatasetParameter):
        return bundle.request.user.has_perm(
            'tardis_portal.change_dataset') and \
            has_dataset_write(bundle.request,
                              bundle.obj.parameterset.dataset.id)
    elif isinstance(bundle.obj, DataFile):
        dataset = DatasetResource.get_via_uri(DatasetResource(),
                                              bundle.data['dataset'],
                                              bundle.request)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DatafileParameterSet):
        dataset = Dataset.objects.get(pk=bundle.obj.datafile.dataset.id)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DatafileParameter):
        dataset = Dataset.objects.get(
            pk=bundle.obj.parameterset.datafile.dataset.id)
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request, dataset.id),
        ])
    elif isinstance(bundle.obj, DataFileObject):
        return all([
            bundle.request.user.has_perm('tardis_portal.change_dataset'),
            bundle.request.user.has_perm('tardis_portal.add_datafile'),
            has_dataset_write(bundle.request,
                              bundle.obj.datafile.dataset.id),
        ])
    elif isinstance(bundle.obj, ObjectACL):
        return bundle.request.user.has_perm('tardis_portal.add_objectacl')
    elif isinstance(bundle.obj, Group):
        return bundle.request.user.has_perm('tardis_portal.add_group')
    elif isinstance(bundle.obj, Facility):
        return bundle.request.user.has_perm('tardis_portal.add_facility')
    elif isinstance(bundle.obj, Instrument):
        # Creating an instrument additionally requires the user to
        # manage the facility the instrument belongs to.
        facilities = facilities_managed_by(bundle.request.user)
        return all([
            bundle.request.user.has_perm('tardis_portal.add_instrument'),
            bundle.obj.facility in facilities
        ])
    raise NotImplementedError(type(bundle.obj))
def retrieve_datafile_list(
        request, dataset_id,
        template_name='tardis_portal/ajax/datafile_list.html'):
    """Render the paginated, optionally filtered datafile list of a
    dataset.

    Narrowing, applied in order: a ``query`` GET parameter (haystack
    search scoped to this dataset; matches are highlighted), previously
    stored session search results when ``search`` is present, and a
    ``filename`` substring filter.  ``limit`` restricts the listing to
    the highlighted matches.
    """
    params = {}
    query = None
    highlighted_dsf_pks = []

    if 'query' in request.GET:
        # Search within this dataset only; matching datafile pks are
        # later highlighted by the template.
        search_query = FacetFixedSearchQuery()
        sqs = SearchQuerySet(query=search_query)
        query = SearchQueryString(request.GET['query'])
        results = sqs.raw_search(query.query_string() +
                                 ' AND dataset_id_stored:%i' %
                                 (int(dataset_id))).load_all()
        highlighted_dsf_pks = [
            int(r.pk) for r in results
            if r.model_name == 'datafile' and
            r.dataset_id_stored == int(dataset_id)
        ]
        params['query'] = query.query_string()
    elif 'datafileResults' in request.session and 'search' in request.GET:
        # Re-use the results of a previous search kept in the session.
        highlighted_dsf_pks = [r.pk
                               for r in request.session['datafileResults']]

    dataset_results = \
        DataFile.objects.filter(
            dataset__pk=dataset_id,
        ).order_by('filename')

    # ``limit`` restricts the listing to the highlighted (matched) files.
    if request.GET.get('limit', False) and len(highlighted_dsf_pks):
        dataset_results = dataset_results.filter(pk__in=highlighted_dsf_pks)
        params['limit'] = request.GET['limit']

    filename_search = None

    if 'filename' in request.GET and len(request.GET['filename']):
        filename_search = request.GET['filename']
        dataset_results = \
            dataset_results.filter(filename__icontains=filename_search)
        params['filename'] = filename_search

    # pagination was removed by someone in the interface but not here.
    # need to fix.
    pgresults = 100

    paginator = Paginator(dataset_results, pgresults)

    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    # If page request (9999) is out of range, deliver last page of results.
    try:
        dataset = paginator.page(page)
    except (EmptyPage, InvalidPage):
        dataset = paginator.page(paginator.num_pages)

    is_owner = False
    has_download_permissions = authz.has_dataset_download_access(
        request, dataset_id)
    has_write_permissions = False

    if request.user.is_authenticated():
        is_owner = authz.has_dataset_ownership(request, dataset_id)
        has_write_permissions = authz.has_dataset_write(request, dataset_id)

    immutable = Dataset.objects.get(id=dataset_id).immutable

    c = {
        # NOTE(review): ``dataset`` here is actually a Page of datafiles
        # (the name was reused above); the template key is accurate.
        'datafiles': dataset,
        'paginator': paginator,
        'immutable': immutable,
        'dataset': Dataset.objects.get(id=dataset_id),
        'filename_search': filename_search,
        'is_owner': is_owner,
        'highlighted_datafiles': highlighted_dsf_pks,
        'has_download_permissions': has_download_permissions,
        'has_write_permissions': has_write_permissions,
        'search_query': query,
        'params': urlencode(params),
    }
    _add_protocols_and_organizations(request, None, c)
    return HttpResponse(render_response_index(request, template_name, c))
def add_dataset_par(request, dataset_id):
    """Add a parameter set to a dataset, subject to write access."""
    dataset = Dataset.objects.get(id=dataset_id)
    if not authz.has_dataset_write(request, dataset.id):
        return return_response_error(request)
    return add_par(request, dataset, otype="dataset", stype=Schema.DATASET)
def edit_datafile_par(request, parameterset_id):
    """Edit a datafile parameter set; requires write access to the
    dataset owning the datafile."""
    pset = DatafileParameterSet.objects.get(id=parameterset_id)
    owning_dataset_id = pset.datafile.dataset.id
    if not authz.has_dataset_write(request, owning_dataset_id):
        return return_response_error(request)
    return edit_parameters(request, pset, otype="datafile")
def get_context_data(self, request, dataset, **kwargs):
    """
    Prepares the values to be passed to the default dataset view,
    respecting authorization rules. Returns a dict of values (the
    context).

    :param request: a HTTP request object
    :type request: :class:`django.http.HttpRequest`
    :param dataset: the Dataset model instance
    :type dataset: tardis.tardis_portal.models.dataset.Dataset
    :return: A dictionary of values for the view/template.
    :rtype: dict
    """

    def get_datafiles_page():
        # pagination was removed by someone in the interface but not here.
        # need to fix.
        pgresults = 100

        paginator = Paginator(dataset.datafile_set.all(), pgresults)

        try:
            # Non-numeric ``page`` values fall back to the first page.
            page = int(request.GET.get('page', '1'))
        except ValueError:
            page = 1

        # If page request is out of range (eg 9999), deliver last page of
        # results.
        try:
            return paginator.page(page)
        except (EmptyPage, InvalidPage):
            return paginator.page(paginator.num_pages)

    c = super(DatasetView, self).get_context_data(**kwargs)

    dataset_id = dataset.id
    upload_method = getattr(settings, "UPLOAD_METHOD", False)
    max_images_in_carousel = getattr(settings, "MAX_IMAGES_IN_CAROUSEL", 0)
    if max_images_in_carousel:
        carousel_slice = ":%s" % max_images_in_carousel
    else:
        # Empty bound: the template slices the full image list.
        carousel_slice = ":"

    c.update(
        {'dataset': dataset,
         'datafiles': get_datafiles_page(),
         'parametersets': dataset.getParameterSets().exclude(
             schema__hidden=True),
         'has_download_permissions': authz.has_dataset_download_access(
             request, dataset_id),
         'has_write_permissions':
             authz.has_dataset_write(request, dataset_id),
         'from_experiment': get_experiment_referer(request, dataset_id),
         'other_experiments':
             authz.get_accessible_experiments_for_dataset(request,
                                                          dataset_id),
         'upload_method': upload_method,
         'push_to_enabled': PushToConfig.name in settings.INSTALLED_APPS,
         'carousel_slice': carousel_slice,
         }
    )

    # Enables UI elements for the push_to app
    if c['push_to_enabled']:
        push_to_args = {
            'dataset_id': dataset.pk
        }
        c['push_to_url'] = reverse(initiate_push_dataset,
                                   kwargs=push_to_args)

    _add_protocols_and_organizations(request, dataset, c)
    return c
def get_context_data(self, request, dataset, **kwargs):
    """
    Prepares the values to be passed to the default dataset view,
    respecting authorization rules. Returns a dict of values (the
    context).

    :param request: a HTTP request object
    :type request: :class:`django.http.HttpRequest`
    :param dataset: the Dataset model instance
    :type dataset: tardis.tardis_portal.models.dataset.Dataset
    :return: A dictionary of values for the view/template.
    :rtype: dict
    """

    def paged_datafiles():
        # pagination was removed by someone in the interface but not
        # here.  need to fix.
        per_page = 100
        paginator = Paginator(dataset.datafile_set.all(), per_page)
        try:
            page_number = int(request.GET.get('page', '1'))
        except ValueError:
            page_number = 1
        # An out-of-range page request (eg 9999) delivers the last page.
        try:
            return paginator.page(page_number)
        except (EmptyPage, InvalidPage):
            return paginator.page(paginator.num_pages)

    context = super(DatasetView, self).get_context_data(**kwargs)

    dataset_id = dataset.id
    upload_method = getattr(settings, "UPLOAD_METHOD", False)
    max_images_in_carousel = getattr(settings, "MAX_IMAGES_IN_CAROUSEL", 0)
    if max_images_in_carousel:
        carousel_slice = ":%s" % max_images_in_carousel
    else:
        carousel_slice = ":"

    context.update({
        'dataset': dataset,
        'datafiles': paged_datafiles(),
        'parametersets':
            dataset.getParameterSets().exclude(schema__hidden=True),
        'has_download_permissions':
            authz.has_dataset_download_access(request, dataset_id),
        'has_write_permissions':
            authz.has_dataset_write(request, dataset_id),
        'from_experiment': get_experiment_referer(request, dataset_id),
        'other_experiments':
            authz.get_accessible_experiments_for_dataset(request,
                                                         dataset_id),
        'upload_method': upload_method,
        'push_to_enabled': PushToConfig.name in settings.INSTALLED_APPS,
        'carousel_slice': carousel_slice,
    })

    # Enables UI elements for the push_to app
    if context['push_to_enabled']:
        context['push_to_url'] = reverse(
            initiate_push_dataset, kwargs={'dataset_id': dataset.pk})

    _add_protocols_and_organizations(request, dataset, context)
    return context
def retrieve_datafile_list(
        request, dataset_id,
        template_name='tardis_portal/ajax/datafile_list.html'):
    """Render the paginated, optionally filtered datafile list of a
    dataset.

    Narrowing, applied in order: a ``query`` GET parameter (haystack
    search scoped to this dataset; matches are highlighted), previously
    stored session search results when ``search`` is present, and a
    ``filename`` substring filter.  ``limit`` restricts the listing to
    the highlighted matches.
    """
    params = {}
    query = None
    highlighted_dsf_pks = []

    if 'query' in request.GET:
        # Full-text search restricted to this dataset; matching datafile
        # pks are highlighted by the template.
        search_query = FacetFixedSearchQuery()
        sqs = SearchQuerySet(query=search_query)
        query = SearchQueryString(request.GET['query'])
        results = sqs.raw_search(
            query.query_string() + ' AND dataset_id_stored:%i' %
            (int(dataset_id))).load_all()
        highlighted_dsf_pks = [int(r.pk) for r in results
                               if r.model_name == 'datafile' and
                               r.dataset_id_stored == int(dataset_id)]
        params['query'] = query.query_string()
    elif 'datafileResults' in request.session and 'search' in request.GET:
        # Fall back to results of a previous search kept in the session.
        highlighted_dsf_pks = [r.pk
                               for r in request.session['datafileResults']]

    dataset_results = \
        DataFile.objects.filter(
            dataset__pk=dataset_id,
        ).order_by('filename')

    # ``limit`` restricts the listing to the highlighted (matched) files.
    if request.GET.get('limit', False) and len(highlighted_dsf_pks):
        dataset_results = dataset_results.filter(pk__in=highlighted_dsf_pks)
        params['limit'] = request.GET['limit']

    filename_search = None

    if 'filename' in request.GET and len(request.GET['filename']):
        filename_search = request.GET['filename']
        dataset_results = \
            dataset_results.filter(filename__icontains=filename_search)
        params['filename'] = filename_search

    # pagination was removed by someone in the interface but not here.
    # need to fix.
    pgresults = 100

    paginator = Paginator(dataset_results, pgresults)

    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    # If page request (9999) is out of range, deliver last page of results.
    try:
        dataset = paginator.page(page)
    except (EmptyPage, InvalidPage):
        dataset = paginator.page(paginator.num_pages)

    is_owner = False
    has_download_permissions = authz.has_dataset_download_access(request,
                                                                 dataset_id)
    has_write_permissions = False

    if request.user.is_authenticated():
        is_owner = authz.has_dataset_ownership(request, dataset_id)
        has_write_permissions = authz.has_dataset_write(request, dataset_id)

    immutable = Dataset.objects.get(id=dataset_id).immutable

    c = {
        # NOTE(review): ``dataset`` here is actually a Page of datafiles
        # (the name was reused above); the template key is accurate.
        'datafiles': dataset,
        'paginator': paginator,
        'immutable': immutable,
        'dataset': Dataset.objects.get(id=dataset_id),
        'filename_search': filename_search,
        'is_owner': is_owner,
        'highlighted_datafiles': highlighted_dsf_pks,
        'has_download_permissions': has_download_permissions,
        'has_write_permissions': has_write_permissions,
        'search_query': query,
        'params': urlencode(params),
    }
    _add_protocols_and_organizations(request, None, c)
    return HttpResponse(render_response_index(request, template_name, c))
def edit_datafile_par(request, parameterset_id):
    """Edit the parameters of a datafile parameter set.

    Only users with write access to the dataset that owns the datafile
    may edit; others get the standard error response.
    """
    pset = DatafileParameterSet.objects.get(id=parameterset_id)
    if not authz.has_dataset_write(request, pset.datafile.dataset.id):
        return return_response_error(request)
    return edit_parameters(request, pset, otype="datafile")