def get_childs(request, revision_id):
    """Return all children (datastreams and visualizations) of a dataset as JSON.

    :param request: HTTP request; ``request.auth_manager`` supplies the language.
    :param revision_id: primary key of the DatasetRevision whose parent
        dataset's children are listed.
    :return: HttpResponse with a JSON object keyed by child type.
    :raises DatasetNotFoundException: when the revision does not exist.
    """
    language = request.auth_manager.language
    try:
        # BUG FIX: the .get() below is what raises DatasetRevision.DoesNotExist,
        # so it must be inside the try block — in the original it sat outside,
        # and DatasetNotFoundException could never be raised.
        dataset_revision = DatasetRevision.objects.get(pk=revision_id)
        childs = DatasetDBDAO().query_childs(
            language=language, dataset_id=dataset_revision.dataset.id)
    except DatasetRevision.DoesNotExist:
        raise DatasetNotFoundException()

    data = {}
    for key, children in childs.items():
        data[key] = []
        for child in children:
            # A child may be a datastream or a visualization; try the
            # datastream serializer first and fall back on KeyError.
            try:
                serializer = ChildDataStreamSerializer(child)
                data[key].append(serializer.data)
            except KeyError:
                serializer = ChildVisualizationSerializer(child)
                data[key].append(serializer.data)

    return HttpResponse(
        JSONRenderer().render(data, renderer_context={'indent': 4}),
        content_type='application/json')
def index(request):
    """List all Datasets.

    Renders the dataset index page. NOTE: the template context is built from
    locals(), so every local name below is deliberately exposed to the
    template — do not rename them.
    """
    account_domain = request.preferences["account.domain"]
    ds_dao = DatasetDBDAO()
    # Filters available to this account, localized to the user's language.
    filters = ds_dao.query_filters(account_id=request.user.account.id, language=request.user.language)
    datastream_impl_valid_choices = DATASTREAM_IMPL_VALID_CHOICES
    return render_to_response("manageDatasets/index.html", locals())
def index(request):
    """List all Datasets.

    :param request: HTTP request; account/language read from request.user.

    NOTE: the template context is built from locals(), so the local names
    below are deliberately exposed to the template — do not rename them.
    """
    account_domain = request.preferences['account.domain']
    ds_dao = DatasetDBDAO()
    # Filters available to this account, localized to the user's language.
    filters = ds_dao.query_filters(account_id=request.user.account.id, language=request.user.language)
    datastream_impl_valid_choices = DATASTREAM_IMPL_VALID_CHOICES
    return render_to_response('manageDatasets/index.html', locals())
class DataSetViewSet(mixins.CreateModelMixin, mixins.UpdateModelMixin, ResourceViewSet):
    """REST viewset for datasets exposed through the 'microsites' app.

    Supports create and update via the DRF mixins; read behavior comes from
    ResourceViewSet.
    """
    queryset = DatasetDBDAO()          # DAO used in place of a Django queryset
    serializer_class = DataSetSerializer
    lookup_field = 'guid'              # URL lookups use the dataset GUID
    dao_get_param = 'guid'             # kwarg name forwarded to the DAO .get()
    data_types = ['dt']                # resource type tag ('dt' == dataset)
    app = 'microsites'
def get_dict(self, language = 'en'):
    """Build the search-index document for this dataset revision.

    :param language: language code used for the localized DAO lookup.

    NOTE(review): no explicit return statement is visible in this view of
    the source — presumably the function continues (or should return
    indexable_dict); confirm against the full file.
    """
    logger = logging.getLogger(__name__)
    from core.daos.datasets import DatasetDBDAO
    import time
    dataset = DatasetDBDAO().get(language, dataset_revision_id=self.id)
    account = Account.objects.get(id=self.user.account.id)
    # Free-text body used for full-text search.
    text = [dataset.title, dataset.description, dataset.user_nick, str(dataset.dataset_id)]  # DS uses GUID, but here it doesn't exist. We use ID
    text.extend(dataset.get_tags())  # datastream has a table for tags but it seems unused. get_tags is defined for dataset.
    text = ' '.join(text)
    account_id = dataset.account_id
    if account_id is None:
        account_id = ''
    # dataset has no parameters (left for compatibility or future upgrades)
    parameters = ""
    indexable_dict = {
        'docid': "DT::DATASET-ID-" + str(dataset.dataset_id),
        'fields': {
            'type': 'dt',
            'dataset_id': dataset.dataset_id,
            'datasetrevision_id': dataset.dataset_revision_id,
            'title': dataset.title,
            'text': text,
            'description': dataset.description,
            'owner_nick': dataset.user_nick,
            'tags': ','.join(dataset.get_tags()),
            'account_id': account_id,
            'parameters': parameters,
            # creation time as a unix timestamp (local time via mktime)
            'timestamp': int(time.mktime(dataset.created_at.timetuple())),
            'end_point': dataset.end_point,
        },
        'categories': {'id': unicode(dataset.category_id), 'name': dataset.category_name}
    }
    # Update dict with facets; failures are logged but never fatal.
    try:
        indexable_dict = add_facets_to_doc(self, account, indexable_dict)
    except Exception, e:
        logger.error("indexable_dict ERROR: [%s]" % str(e))
def get_filters_json(request):
    """Return every dataset filter available to the account, as JSON.

    :param request: HTTP request; account and language come from request.user.
    """
    if settings.DEBUG:
        logger.info('GET FILTERs')

    dao = DatasetDBDAO()
    available_filters = dao.query_filters(
        account_id=request.user.account.id,
        language=request.user.language)

    # normalize=True  # TODO check
    rendered = DefaultDictToJson().render(data=available_filters)
    return HttpResponse(rendered, mimetype="application/json")
def view(request, revision_id):
    """Render the workspace detail page for a dataset revision.

    :param revision_id: DatasetRevision primary key to display.
    :raises DatasetNotFoundException: when the revision does not exist.

    NOTE: the template context is built from locals(), so the local names
    below are deliberately exposed to the template — do not rename them.
    """
    account_id = request.auth_manager.account_id
    credentials = request.auth_manager
    user_id = request.auth_manager.id
    language = request.auth_manager.language
    try:
        dataset = DatasetDBDAO().get(user=request.user, dataset_revision_id=revision_id)
    except DatasetRevision.DoesNotExist:
        raise DatasetNotFoundException()
    datastream_impl_not_valid_choices = DATASTREAM_IMPL_NOT_VALID_CHOICES
    return render_to_response('viewDataset/index.html', locals())
def view(request, dataset_id, slug):
    """Show a published dataset on the microsite.

    :param dataset_id: Dataset primary key.
    :param slug: URL slug (not used for the lookup).

    NOTE: the template context is built from locals(), so the local names
    below are deliberately exposed to the template — do not rename them.
    """
    account = request.account
    preferences = request.preferences
    dataset = DatasetDBDAO().get(request.user, dataset_id=dataset_id, published=True)
    impl_choices = choices.SourceImplementationChoices
    # '?embedded=true' renders the chrome-less embeddable template.
    if request.GET.get('embedded', False) == 'true':
        return render_to_response('viewDataset/embedded.html', locals())
    else:
        return render_to_response('viewDataset/index.html', locals())
def download(request, dataset_id, slug):
    """Download a dataset file directly.

    :param slug: URL slug (not used for the lookup).
    :param dataset_id: Dataset primary key.
    :param request: HTTP request.
    :raises Http404: when the dataset or its published revision is missing.
    """
    logger = logging.getLogger(__name__)
    # get public url for datastream id
    try:
        dataset_revision_id = Dataset.objects.get(pk=dataset_id).last_published_revision.id
        dataset = DatasetDBDAO().get(request.user, dataset_revision_id=dataset_revision_id)
    except Exception, e:
        logger.info("Can't find the dataset: %s [%s]" % (dataset_id, str(e)))
        raise Http404
    # NOTE(review): no return on the success path is visible in this excerpt —
    # the function presumably continues beyond this view of the source; confirm.
class RestDataSetViewSet(ResourceViewSet):
    """REST viewset for workspace datasets (unpublished revisions included)."""
    queryset = DatasetDBDAO()               # DAO used in place of a Django queryset
    serializer_class = DataSetSerializer
    lookup_field = 'id'
    data_types = ['dt']                     # resource type tag ('dt' == dataset)
    dao_get_param = 'dataset_revision_id'   # kwarg name forwarded to DAO .get()
    dao_pk = 'dataset_revision_id'
    app = 'workspace'
    published = False                       # workspace sees draft revisions too

    @detail_route(methods=['get'], renderer_classes=[HTMLEngineRenderer])
    def tables(self, request, pk=None, *args, **kwargs):
        # Delegate to the engine 'load' call to render the dataset's tables.
        return self.engine_call(
            request, 'load', form_class=DatasetLoadForm, serialize=False)
def retrieve_childs(request):
    """Return the datastreams and visualizations attached to a dataset.

    Each child dict is tagged with a 'type' key ('dataview' or
    'visualization') and the combined list is returned as JSON.
    """
    language = request.auth_manager.language
    dataset_id = request.GET.get('dataset_id', '')

    # For now, we'll fetch datastreams
    children = DatasetDBDAO().query_childs(dataset_id=dataset_id,
                                           language=language)

    list_result = []
    # Tag each child with its resource type, then flatten into one list.
    for group_key, type_label in (('datastreams', 'dataview'),
                                  ('visualizations', 'visualization')):
        for child in children[group_key]:
            child['type'] = type_label
            list_result.append(child)

    dump = json.dumps(list_result, cls=DjangoJSONEncoder)
    return HttpResponse(dump, mimetype="application/json")
def download(request, dataset_id, slug):
    """Download an internal dataset file as a forced attachment.

    :param request: HTTP request; language comes from request.auth_manager.
    :param dataset_id: Dataset primary key.
    :param slug: URL slug (not used for the lookup).
    :raises DatasetDoesNotExist: when the dataset cannot be retrieved.
    """
    try:
        # BUG FIX: the original passed dataset_id=id (the builtin function),
        # so the lookup never used the requested dataset id.
        dataset = DatasetDBDAO().get(request.auth_manager.language,
                                     dataset_id=dataset_id,
                                     published=True)
    except Exception:
        # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        raise DatasetDoesNotExist
    else:
        try:
            response = HttpResponse(mimetype='application/force-download')
            response[
                'Content-Disposition'] = 'attachment; filename="{}"'.format(
                    dataset['filename'].encode('utf-8'))
            # Proxy the remote file body into the response.
            response.write(
                urllib2.urlopen(dataset['end_point_full_url']).read())
        except Exception:
            # Best-effort download: log the failing end point and fall
            # through (matches the original behavior).
            logger.error(dataset['end_point'])
        return response
def download(request, dataset_id, slug):
    """Download an internal dataset file by redirecting to its end point.

    :param request: HTTP request.
    :param dataset_id: Dataset primary key.
    :param slug: URL slug (not used for the lookup).
    :raises DatasetDoesNotExist: when the dataset cannot be retrieved.
    :raises PermissionDenied: for unsupported collect types, or when the
        redirect itself fails.
    """
    try:
        dataset = DatasetDBDAO().get(request.user,
                                     dataset_id=dataset_id,
                                     published=True)
    except Exception:
        # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        raise DatasetDoesNotExist
    else:
        if dataset['collect_type'] == choices.CollectTypeChoices.SELF_PUBLISH:
            try:
                return redirect(dataset['end_point_full_url'])
            except Exception:
                logger.exception("Error en descarga de archivo %s"
                                 % dataset['end_point_full_url'])
        elif dataset['collect_type'] == choices.CollectTypeChoices.URL:
            try:
                return redirect(dataset['end_point'])
            except Exception:
                logger.exception("Error en descarga de archivo %s"
                                 % dataset['end_point'])
        # Unknown collect type, or the redirect raised above.
        raise PermissionDenied
def validate(self, data):
    """Validate datastream creation/update payload (serializer hook).

    Resolves the 'dataset' GUID to a Dataset instance, checks the file type
    allows view creation, optionally builds select/data-source statements
    from 'table_id', resolves the category, and stamps status and language.

    :param data: dict of deserialized field values.
    :return: the validated (and mutated) data dict.
    :raises exceptions.ValidationError: on unknown dataset or invalid type.
    """
    guid = data.pop('dataset', None)
    if guid:
        try:
            # Unpublished lookup: views may be built on draft revisions.
            self.dataset = DatasetDBDAO().get(
                self.context['request'].auth['language'],
                guid=guid,
                published=False)
            data['dataset'] = Dataset.objects.get(
                id=self.dataset['dataset_id'])
        except ObjectDoesNotExist:
            # TODO: mejorar errores
            raise exceptions.ValidationError(
                {'dataset': 'Dataset no existe'})
        if data['dataset'].last_revision.impl_type not in DATASTREAM_IMPL_VALID_CHOICES:
            # TODO: mejorar errores
            raise exceptions.ValidationError({
                'dataset': 'El tipo de archivo no permite creacion de vistas'
            })
    if 'table_id' in data:
        # Derive the SQL select and data source from the chosen table.
        table_id = data.pop('table_id')
        data['select_statement'] = SelectStatementBuilder().build(
            table_id)
        data['data_source'] = DataSourceBuilder().build(
            table_id, data['dataset'].last_revision_id, 'microsites')
    if 'category' in data and data['category']:
        data['category'] = self.getCategory(data['category']).id
    # New datastreams always start in review, in the requester's language.
    data['status'] = StatusChoices.PENDING_REVIEW
    data['language'] = self.context['request'].auth['language']
    return data
def _wrapped_view(request, *args, **kwargs):
    """Decorator body: require the account to own at least one dataset.

    `view_func` comes from the enclosing decorator scope (not visible in
    this excerpt).

    :raises AnyDatasetRequiredException: when the account has no datasets.
    """
    dao = DatasetDBDAO()
    query, total_resources = dao.query(account_id=request.account.id, language=request.user.language)
    # 'test-no-datasets=1' lets tests simulate the empty-account case.
    if total_resources == 0 or request.GET.get('test-no-datasets', False) == '1':
        raise AnyDatasetRequiredException()
    return view_func(request, *args, **kwargs)
def get_dataset(self, resource_id=None, revision_id=None, published=True):
    """Fetch a dataset through the DAO for the current user.

    :param resource_id: dataset id to fetch (optional).
    :param revision_id: dataset revision id to fetch (optional).
    :param published: restrict the lookup to published revisions.
    """
    from core.daos.datasets import DatasetDBDAO
    dao = DatasetDBDAO()
    return dao.get(self.user,
                   dataset_id=resource_id,
                   dataset_revision_id=revision_id,
                   published=published)
def validate(self, data):
    """Validate datastream payload (workspace variant of the serializer hook).

    Resolves the 'dataset' GUID, checks impl type, builds select/data-source
    from 'table_id' (honoring an optional 'header_row'), normalizes category,
    tags and meta_text, and stamps status and language.

    :param data: dict of deserialized field values.
    :return: the validated (and mutated) data dict.
    :raises exceptions.ValidationError: on unknown dataset, invalid type,
        or malformed meta_text.
    """
    guid = data.pop('dataset', None)
    if guid:
        try:
            # Unpublished lookup: views may be built on draft revisions.
            self.dataset = DatasetDBDAO().get(self.context['request'].user,
                                              guid=guid,
                                              published=False)
            data['dataset'] = Dataset.objects.get(
                id=self.dataset['dataset_id'])
        except ObjectDoesNotExist:
            # TODO: mejorar errores
            raise exceptions.ValidationError(
                {'dataset': 'Dataset no existe'})
        if data['dataset'].last_revision.impl_type not in DATASTREAM_IMPL_VALID_CHOICES:
            # TODO: mejorar errores
            raise exceptions.ValidationError({
                'dataset': 'El tipo de archivo no permite creacion de vistas'
            })
    if 'table_id' in data:
        # Derive the SQL select and data source from the chosen table.
        table_id = data.pop('table_id')
        data['select_statement'] = SelectStatementBuilder().build(
            table_id)
        header_row = None
        if 'header_row' in data:
            header_row = data.pop('header_row')
        data['data_source'] = DataSourceBuilder().build(
            table_id, header_row, data['dataset'].last_revision_id, 'microsites')
    if 'category' in data and data['category']:
        data['category'] = self.getCategory(data['category']).id
    if 'tags' in data:
        if data['tags']:
            # Comma-separated string -> list of {'name': tag} dicts
            # (Python 2 map returns a list).
            data['tags'] = map(lambda x: {'name': x}, data['tags'].split(','))
        else:
            data.pop('tags')
    if 'meta_text' in data:
        # meta_text must validate against the account's metadata schema.
        meta_data = self.context['request'].auth['account'].meta_data
        if meta_data:
            try:
                meta_text_json = json.loads(data['meta_text'])
            except:
                raise exceptions.ValidationError(
                    {'meta_text': 'Invalid Json'})
            meta_form = MetaForm(meta_text_json, metadata=meta_data)
            if meta_form.is_valid():
                data['meta_text'] = meta_form.output_json()
            else:
                raise exceptions.ValidationError({
                    'meta_text': 'Invalid %s' % meta_form.errors.as_text()
                })
        else:
            data['meta_text'] = ''
    else:
        data['meta_text'] = ''
    # New datastreams always start in review, in the requester's language.
    data['status'] = StatusChoices.PENDING_REVIEW
    data['language'] = self.context['request'].auth['language']
    return data
def edit(request, dataset_revision_id=None):
    """Edit a dataset revision.

    GET renders the collect-type-specific edit form pre-filled with the
    revision's current values; POST validates the form and saves the edit
    through the lifecycle manager.

    :param dataset_revision_id: DatasetRevision primary key being edited.
    :return: rendered form page (GET) or JSON status payload (POST).
    :raises DatasetSaveException: when the POSTed form is invalid.

    NOTE: on GET the template context is built from locals(), so local
    names are deliberately exposed to the template — do not rename them.
    """
    account_id = request.auth_manager.account_id
    auth_manager = request.auth_manager
    language = request.auth_manager.language
    user_id = request.auth_manager.id
    # TODO: Put line in a common place
    collect_types = {0: "file", 1: "url", 2: "webservice"}
    # TODO: Review. Category was not loading options from form init.
    category_choices = [
        [category["category__id"], category["name"]]
        for category in CategoryI18n.objects.filter(
            language=language, category__account=account_id).values(
                "category__id", "name")
    ]
    if request.method == "GET":
        status_options = auth_manager.get_allowed_actions()
        # Get data set and the right template depending on the collected type
        dataset = DatasetDBDAO().get(language=language,
                                     dataset_revision_id=dataset_revision_id)
        url = "editDataset/{0}.html".format(
            collect_types[dataset["collect_type"]])
        # Import the form that we really need.
        # BUG FIX: the original compared strings with `is not`, an identity
        # test that only works because CPython interns short literals; use
        # equality instead.
        if collect_types[dataset["collect_type"]] != "url":
            className = [collect_types[dataset["collect_type"]].capitalize(),
                         "Form"]
        else:
            className = ["Dataset", "Form"]
        className = "".join(str(elem) for elem in className)
        mod = __import__("workspace.manageDatasets.forms",
                         fromlist=[className])
        initial_values = dict(
            # Dataset Form
            dataset_id=dataset.get("id"),
            title=dataset.get("title"),
            description=dataset.get("description"),
            category=dataset.get("category_id"),
            status=dataset.get("status"),
            notes=dataset.get("notes"),
            file_name=dataset.get("filename"),
            end_point=dataset.get("end_point"),
            impl_type=dataset.get("impl_type"),
            license_url=dataset.get("license_url"),
            spatial=dataset.get("spatial"),
            frequency=dataset.get("frequency"),
            mbox=dataset.get("mbox"),
            sources=dataset.get("sources"),
            tags=dataset.get("tags"),
        )
        form = getattr(mod, className)(status_options=status_options)
        form.label_suffix = ""
        form.fields["category"].choices = category_choices
        form.initial = initial_values
        return render_to_response(url, locals())
    elif request.method == "POST":
        # Update dataset
        form = DatasetFormFactory(request.POST.get("collect_type")).create(
            request, account_id=account_id, language=language,
            status_choices=auth_manager.get_allowed_actions()
        )
        if form.is_valid():
            lifecycle = DatasetLifeCycleManager(
                user=request.user, dataset_revision_id=dataset_revision_id)
            dataset_revision = lifecycle.edit(
                collect_type=request.POST.get("collect_type"),
                changed_fields=form.changed_data,
                language=language, **form.cleaned_data
            )
            data = dict(
                status="ok",
                messages=[ugettext("APP-DATASET-CREATEDSUCCESSFULLY-TEXT")],
                dataset_revision_id=dataset_revision.id,
            )
            return HttpResponse(json.dumps(data), content_type="text/plain")
        else:
            raise DatasetSaveException(form.errors)
def edit(request, dataset_revision_id=None):
    """Edit a dataset revision (workspace variant with signal emission).

    GET renders the collect-type-specific edit form pre-filled with the
    revision's current values; POST validates, saves through the lifecycle
    manager, and emits the dataset_changed signal.

    :param dataset_revision_id: DatasetRevision primary key being edited.
    :return: rendered form page (GET) or JSON status payload (POST).
    :raises DatasetSaveException: when the POSTed form is invalid.

    NOTE: on GET the template context is built from locals(), so local
    names are deliberately exposed to the template — do not rename them.
    """
    account_id = request.auth_manager.account_id
    auth_manager = request.auth_manager
    language = request.auth_manager.language
    user_id = request.auth_manager.id
    extensions_list = SOURCE_EXTENSION_LIST
    # TODO: Put line in a common place
    collect_types = {0: 'file', 1: 'url', 2: 'webservice'}
    # TODO: Review. Category was not loading options from form init.
    category_choices = [[
        category['category__id'], category['name']
    ] for category in CategoryI18n.objects.filter(
        language=language, category__account=account_id).values(
            'category__id', 'name')]

    # Get data set and the right template depending on the collected type
    dataset = DatasetDBDAO().get(user=request.user,
                                 dataset_revision_id=dataset_revision_id)

    initial_values = dict(
        # Dataset Form
        dataset_id=dataset.get('id'),
        title=dataset.get('title'),
        description=dataset.get('description'),
        category=dataset.get('category_id'),
        status=dataset.get('status'),
        notes=dataset.get('notes'),
        file_name=dataset.get('filename'),
        end_point=dataset.get('end_point'),
        impl_type=dataset.get('impl_type'),
        license_url=dataset.get('license_url'),
        spatial=dataset.get('spatial'),
        frequency=dataset.get('frequency'),
        mbox=dataset.get('mbox'),
        sources=dataset.get('sources'),
        tags=dataset.get('tags'),
        doc=dataset.get('doc'))

    if request.method == 'GET':
        status_options = auth_manager.get_allowed_actions()
        url = 'editDataset/{0}.html'.format(
            collect_types[dataset['collect_type']])
        # Import the form that we really need.
        # BUG FIX: the original compared strings with `is not`, an identity
        # test that only works because CPython interns short literals; use
        # equality instead.
        if collect_types[dataset['collect_type']] != 'url':
            className = [
                collect_types[dataset['collect_type']].capitalize(), "Form"
            ]
        else:
            className = ['Dataset', "Form"]
        className = ''.join(str(elem) for elem in className)
        mod = __import__('workspace.manageDatasets.forms',
                         fromlist=[className])
        form = getattr(mod, className)(status_options=status_options)
        form.label_suffix = ''
        form.fields['category'].choices = category_choices
        form.initial = initial_values
        return render_to_response(url, locals())
    elif request.method == 'POST':
        # Update dataset
        form = DatasetFormFactory(request.POST.get('collect_type')).create(
            request, account_id=account_id, language=language,
            status_choices=auth_manager.get_allowed_actions())
        # Agrego los valores iniciales para que el changed_data de correctamente
        form.initial = initial_values
        if form.is_valid():
            lifecycle = DatasetLifeCycleManager(
                user=request.user, dataset_revision_id=dataset_revision_id)
            dataset_revision = lifecycle.edit(
                collect_type=request.POST.get('collect_type'),
                changed_fields=form.changed_data,
                language=language, **form.cleaned_data)
            # Signal
            dataset_changed.send_robust(sender='edit_view',
                                        id=lifecycle.dataset.id,
                                        rev_id=lifecycle.dataset_revision.id)
            data = dict(
                status='ok',
                messages=[ugettext('APP-DATASET-CREATEDSUCCESSFULLY-TEXT')],
                dataset_revision_id=dataset_revision.id)
            return HttpResponse(json.dumps(data), content_type='text/plain')
        else:
            raise DatasetSaveException(form)
def change_status(request, dataset_revision_id=None):
    """Change a dataset revision's status.

    :param request: HTTP request; POST carries 'action' and 'killemall'.
    :param dataset_revision_id: DatasetRevision primary key to act on.
    :return: JSON object with a status message and the updated resource.
    :raises NoStatusProvidedException: when 'action' is not a known action.
    """
    if dataset_revision_id:
        lifecycle = DatasetLifeCycleManager(
            user=request.user, dataset_revision_id=dataset_revision_id)
        action = request.POST.get('action')
        # fix para poder llamar dinamicamente al metodo de lifecycle
        action = 'accept' if action == 'approve' else action
        killemall = True if request.POST.get('killemall', False) == 'true' else False

        if action not in [
                'accept', 'reject', 'publish', 'unpublish', 'send_to_review',
                'publish_all'
        ]:
            raise NoStatusProvidedException()

        if action == 'unpublish':
            getattr(lifecycle, action)(killemall)
            # Signal
            dataset_unpublished.send_robust(
                sender='change_status_view',
                id=lifecycle.dataset.id,
                rev_id=lifecycle.dataset_revision.id)
        elif action == 'publish_all':
            getattr(lifecycle, 'publish')(accept_children=True)
        else:
            getattr(lifecycle, action)()

        # BUG FIX: in the original every `title = ugettext(...)` line ended
        # with a stray trailing comma, turning the title into a 1-tuple
        # instead of a string in the JSON response.
        if action == 'accept':
            title = ugettext('APP-DATASET-APPROVED-TITLE')
            description = ugettext('APP-DATASET-APPROVED-TEXT')
        elif action == 'reject':
            title = ugettext('APP-DATASET-REJECTED-TITLE')
            description = ugettext('APP-DATASET-REJECTED-TEXT')
        elif action == 'publish':
            title = ugettext('APP-DATASET-PUBLISHED-TITLE')
            description = ugettext('APP-DATASET-PUBLISHED-TEXT')
        elif action == 'unpublish':
            if killemall:
                description = ugettext('APP-DATASET-UNPUBLISHALL-TEXT')
            else:
                description = ugettext('APP-DATASET-UNPUBLISH-TEXT')
            title = ugettext('APP-DATASET-UNPUBLISH-TITLE')
        elif action == 'send_to_review':
            title = ugettext('APP-DATASET-SENDTOREVIEW-TITLE')
            description = ugettext('APP-DATASET-SENDTOREVIEW-TEXT')
        elif action == 'publish_all':
            title = ugettext('APP-DATASET-PUBLISHALL-TITLE')
            description = ugettext('APP-DATASET-PUBLISHALL-TEXT')

        response = dict(status='ok',
                        messages={
                            'title': title,
                            'description': description
                        })

        # Limpio un poco
        response['result'] = DatasetDBDAO().get(
            request.user, dataset_revision_id=dataset_revision_id)

        # Build the microsite public URL for the updated dataset.
        account = request.account
        msprotocol = 'https' if account.get_preference(
            'account.microsite.https') else 'http'
        response['result'][
            'public_url'] = msprotocol + "://" + request.preferences[
                'account.domain'] + reverse(
                    'manageDatasets.view',
                    urlconf='microsites.urls',
                    kwargs={
                        'dataset_id': response['result']['dataset_id'],
                        'slug': '-'
                    })

        # Strip heavy child collections before serializing.
        response['result'].pop('datastreams')
        response['result'].pop('visualizations')
        response['result'].pop('tags')
        response['result'].pop('sources')
        return JSONHttpResponse(json.dumps(response, cls=DateTimeEncoder))
def getDao(self, dataset_revision):
    """Fetch the unpublished DAO record for the given dataset revision,
    localized to the requester's language."""
    request_language = self.context['request'].auth['language']
    dao = DatasetDBDAO()
    return dao.get(dataset_revision_id=dataset_revision.id,
                   language=request_language,
                   published=False)
def filter(request, page=0, itemsxpage=settings.PAGINATION_RESULTS_PER_PAGE):
    """Filter, sort and paginate the account's datasets; return JSON.

    :param itemsxpage: page size (overridable via the 'itemxpage' GET param).
    :param page: page number (overridable via the 'page' GET param).
    :param request: HTTP request; GET carries filters/q/sort_by/order.
    """
    bb_request = request.GET
    filters_param = bb_request.get('filters')
    filters_dict = dict()
    filter_name = ''
    sort_by = bb_request.get("sort_by", None)
    order = bb_request.get("order", "asc")
    exclude = None
    # 'filters' is a JSON-encoded dict mapping UI filter names to ORM lookups.
    if filters_param is not None and filters_param != '':
        filters = json.loads(filters_param)
        filters_dict['impl_type'] = filters.get('type')
        filters_dict['category__categoryi18n__name'] = filters.get('category')
        filters_dict['dataset__user__nick'] = filters.get('author')
        filters_dict['status'] = filters.get('status')
    if bb_request.get('page') is not None and bb_request.get('page') != '':
        page = int(bb_request.get('page'))
    if bb_request.get('q') is not None and bb_request.get('q') != '':
        filter_name = bb_request.get('q')
    if bb_request.get(
            'itemxpage') is not None and bb_request.get('itemxpage') != '':
        itemsxpage = int(bb_request.get('itemxpage'))
    if bb_request.get('collect_type', None) is not None:
        # If File Dataset, set impl_types as valid ones.
        # NOTE(review): 'File' appears unused — presumably documents that the
        # 'file' collect type has value 0; confirm and consider removing.
        File = 0
        if bb_request.get('collect_type') in map(lambda x: str(x), COLLECT_TYPE_FILTERABLES):
            # Exclude invalid impl types and oversized datastreams for
            # filterable collect types.
            exclude = [{
                'dataset__type__in': COLLECT_TYPE_FILTERABLES,
                'impl_type__in': DATASTREAM_IMPL_NOT_VALID_CHOICES,
            }, {
                'size__gt': settings.MAX_DATASTREAM_SIZE,
            }]
    # define la forma de ordenamiento
    if sort_by:
        if sort_by == "category":
            sort_by = "category__categoryi18n__name"
        elif sort_by == "title":
            sort_by = "dataseti18n__title"
        elif sort_by == "author":
            sort_by = "dataset__user__nick"
        if order == "desc":
            sort_by = "-" + sort_by
    else:
        # no se por que setea un orden de este tipo si no
        # se envia el parametro
        sort_by = '-id'
    total_resources = request.stats['account_total_datasets']
    resources, total_entries = DatasetDBDAO().query(
        account_id=request.account.id, language=request.user.language,
        page=page, itemsxpage=itemsxpage, filters_dict=filters_dict,
        sort_by=sort_by, filter_name=filter_name, exclude=exclude)
    total_categories, total_authors, total_statuses = DatasetDBDAO(
    ).query_total_filters(account_id=request.account.id,
                          language=request.user.language, page=page,
                          itemsxpage=itemsxpage, filters_dict=filters_dict,
                          sort_by=sort_by, filter_name=filter_name,
                          exclude=exclude)
    # Attach the workspace detail URL to each row.
    for resource in resources:
        resource['url'] = reverse('manageDatasets.view',
                                  urlconf='workspace.urls',
                                  kwargs={'revision_id': resource['id']})
    data = {
        'total_entries': total_entries,
        'total_resources': total_resources,
        'resources': resources,
        'total_categories': total_categories,
        'total_authors': total_authors,
        'total_statuses': total_statuses
    }
    if settings.DEBUG:
        logger.info('filter dataset: %d, %s' % (total_entries, str(total_resources)))
    response = DatasetList().render(data)
    return HttpResponse(response, content_type="application/json")
def getDao(self, dataset_revision):
    """Fetch the unpublished DAO record for the given dataset revision,
    scoped to the requesting user."""
    requesting_user = self.context['request'].user
    return DatasetDBDAO().get(user=requesting_user,
                              dataset_revision_id=dataset_revision.id,
                              published=False)
dataset_orig = Dataset.objects.get(pk=dataset_id) except Dataset.DoesNotExist, DatasetRevision.DoesNotExist: logger.error('Dataset doesn\'t exists [%s|%s]' % (str(dataset_id), str(account.id))) raise DatasetDoesNotExist except Exception, e: logger.error('Dataset error [%s|%s]=%s' % (str(dataset_id), str(account.id), repr(e))) raise DatasetError if not dataset_orig.last_published_revision: logger.error('Dataset {} has no published revision'.format(dataset_id)) raise Http404 dataset = DatasetDBDAO().get( request.auth_manager.language, dataset_revision_id=dataset_orig.last_published_revision.id) return render_to_response('viewDataset/index.html', locals()) @require_http_methods(["GET"]) def download(request, dataset_id, slug): """ download internal dataset file """ try: dataset = DatasetDBDAO().get(request.auth_manager.language, dataset_id=id, published=True) except: raise DatasetDoesNotExist else: