def retract(self, dataset):
    """Withdraw a published dataset so it is only visible to its owners."""
    self._get_dataset(dataset)
    require.dataset.update(c.dataset)

    # Retracting a dataset that is already private is a client error.
    if c.dataset.private:
        abort(400, _("This dataset is already private!"))

    c.dataset.private = True
    c.dataset.updated_at = datetime.utcnow()

    # Aggregation results for this dataset are stale once it is hidden.
    AggregationCache(c.dataset).invalidate()
    db.session.commit()

    # The public dataset index no longer includes this dataset, so its
    # cache must be invalidated as well.
    DatasetIndexCache().invalidate()

    h.flash_success(_("The dataset has been retracted. "
                      "It is no longer visible to others."))
    redirect(h.url_for(controller='editor', action='index',
                       dataset=c.dataset.name))
def retract(self, dataset):
    """Flip a public dataset back to private and flush related caches."""
    self._get_dataset(dataset)
    require.dataset.update(c.dataset)

    if c.dataset.private:
        # Nothing to do — reject the request outright.
        abort(400, _("This dataset is already private!"))

    # Mark private, stamp the modification time, and drop cached
    # aggregations before persisting the change.
    c.dataset.private = True
    c.dataset.updated_at = datetime.utcnow()
    AggregationCache(c.dataset).invalidate()
    db.session.commit()

    # Need to invalidate the cache of the dataset index
    index_cache = DatasetIndexCache()
    index_cache.invalidate()

    message = _("The dataset has been retracted. "
                "It is no longer visible to others.")
    h.flash_success(message)

    redirect(h.url_for(controller='editor', action='index',
                       dataset=c.dataset.name))
def publish(self, dataset):
    """Make a private dataset publicly visible and flush the index cache."""
    self._get_dataset(dataset)
    require.dataset.update(c.dataset)

    # Publishing an already-public dataset is a client error.
    if not c.dataset.private:
        abort(400, _("This dataset is already public!"))

    c.dataset.private = False
    c.dataset.updated_at = datetime.utcnow()
    db.session.commit()

    # The dataset index now includes this dataset, so its cached copy
    # must be invalidated.
    DatasetIndexCache().invalidate()

    public_url = h.url_for(controller='dataset', action='view',
                           dataset=c.dataset.name, qualified=True)
    h.flash_success(_("Congratulations, the dataset has been "
                      "published. It is now available at: %s") % public_url)
    redirect(h.url_for(controller='editor', action='index',
                       dataset=c.dataset.name))
def publish(self, dataset):
    """Flip a private dataset to public and announce its public URL."""
    self._get_dataset(dataset)
    require.dataset.update(c.dataset)

    if not c.dataset.private:
        # Already public — nothing to change.
        abort(400, _("This dataset is already public!"))

    # Record the visibility change and persist it.
    c.dataset.private = False
    c.dataset.updated_at = datetime.utcnow()
    db.session.commit()

    # Need to invalidate the cache of the dataset index
    index_cache = DatasetIndexCache()
    index_cache.invalidate()

    # Build the fully-qualified public URL to show in the success flash.
    public_url = h.url_for(controller='dataset', action='view',
                           dataset=c.dataset.name, qualified=True)
    message = _("Congratulations, the dataset has been "
                "published. It is now available at: %s") % public_url
    h.flash_success(message)

    redirect(h.url_for(controller='editor', action='index',
                       dataset=c.dataset.name))
def index(self, format='html'): """ Get a list of all datasets along with territory, language, and category counts (amount of datasets for each). """ # Create facet filters (so we can look at a single country, # language etc.) c.query = request.params.items() c.add_filter = lambda f, v: \ '?' + urlencode(c.query + [(f, v)] if (f, v) not in c.query else c.query) c.del_filter = lambda f, v: \ '?' + urlencode([(k, x) for k, x in c.query if (k, x) != (f, v)]) # Parse the request parameters to get them into the right format parser = DatasetIndexParamParser(request.params) params, errors = parser.parse() if errors: concatenated_errors = ', '.join(errors) abort(400, _('Parameter values not supported: %s') % concatenated_errors) # We need to pop the page and pagesize parameters since they're not # used for the cache (we have to get all of the datasets to do the # language, territory, and category counts (these are then only used # for the html response) params.pop('page') pagesize = params.pop('pagesize') # Get cached indices (this will also generate them if there are no # cached results (the cache is invalidated when a dataset is published # or retracted cache = DatasetIndexCache() results = cache.index(**params) # Generate the ETag from the last modified timestamp of the first # dataset (since they are ordered in descending order by last # modified). It doesn't matter that this happens if it has (possibly) # generated the index (if not cached) since if it isn't cached then # the ETag is definitely modified. We wrap it in a try clause since # if there are no public datasets we'll get an index error. 
# We also don't set c._must_revalidate to True since we don't care # if the index needs a hard refresh try: etag_cache_keygen( results['datasets'][0]['timestamps']['last_modified']) except IndexError: etag_cache_keygen(None) # Assign the results to template context variables c.language_options = results['languages'] c.territory_options = results['territories'] c.category_options = results['categories'] if format == 'json': # Apply links to the dataset lists before returning the json results['datasets'] = [dataset_apply_links(r) for r in results['datasets']] return to_jsonp(results) elif format == 'csv': # The CSV response only shows datasets, not languages, # territories, etc. return write_csv(results['datasets'], response) # If we're here then it's an html format so we show rss, do the # pagination and render the template c.show_rss = True # The page parameter we popped earlier is part of request.params but # we now know it was parsed. We have to send in request.params to # retain any parameters already supplied (filters) c.page = templating.Page(results['datasets'], items_per_page=pagesize, item_count=len(results['datasets']), **request.params) return templating.render('dataset/index.html')