def get_mapping_dict(cls):
    """Build {search_field: {resource_type: ORM lookup path}} used to turn
    search-result fields into queryset lookups, merged with the mappings
    contributed by active 'multiple_resources' plugins.

    cls.resources holds (orm_prefix, resource_type) pairs.
    """
    answer = {
        'title': dict((rtype, prefix + 'i18n__title')
                      for prefix, rtype in cls.resources),
        'description': dict((rtype, prefix + 'i18n__description')
                            for prefix, rtype in cls.resources),
        'user': dict((rtype, prefix + '__user__name')
                     for prefix, rtype in cls.resources),
        'nick': dict((rtype, prefix + '__user__nick')
                     for prefix, rtype in cls.resources),
        'category': {
            settings.TYPE_DATASET: 'category__categoryi18n__slug',
            settings.TYPE_DATASTREAM: 'category__categoryi18n__slug',
            settings.TYPE_VISUALIZATION:
                'visualization__datastream__last_revision__category__categoryi18n__slug',
        },
        'revision_id': dict((rtype, prefix + '__last_revision_id')
                            for prefix, rtype in cls.resources),
        'published_revision_id': dict((rtype, prefix + '__last_published_revision_id')
                                      for prefix, rtype in cls.resources),
        'resource_id': dict((rtype, prefix + '__id')
                            for prefix, rtype in cls.resources),
        'lib': {settings.TYPE_VISUALIZATION: 'lib'},
        'parameters': {settings.TYPE_DATASTREAM: 'parameters'},
    }
    # Merge plugin mappings key by key, without clobbering built-in entries.
    for plugin in DatalPluginPoint.get_active_with_att('multiple_resources'):
        for key, value in plugin.get_mapping_dict().items():
            answer.setdefault(key, {}).update(value)
    return answer
def load(request):
    """Render the microsite's home page; fall back to /search/ when no
    custom home is configured or the theme fails to parse.

    :param request: HttpRequest carrying .account, .preferences and
        .auth_manager (set by project middleware).
    """
    # FIX: removed unused local `jsonObject` (dead assignment).
    language = request.auth_manager.language
    account = request.account
    preferences = request.preferences
    is_preview = request.GET.get('preview') == 'true'

    builder = ThemeBuilder(preferences, is_preview, language, request.user)

    # Built-in resource types plus any plugin-provided finder types.
    resources = ["ds", "vz"]
    resources.extend(
        finder.doc_type
        for finder in DatalPluginPoint.get_active_with_att('finder'))
    if account.get_preference("account.dataset.show"):
        resources.append("dt")

    if is_preview or preferences["account_home"]:
        # New-style home page rendered through the theme builder.
        data = builder.parse()
        if data:
            accounts_ids = data['federated_accounts_ids'] + [account.id]
            queryset = FinderQuerySet(FinderManager(HomeFinder),
                                      max_results=250,
                                      account_id=accounts_ids,
                                      resource=resources)
            paginator = Paginator(queryset, 25)
            revisions = paginator.page(1)

            if data['federated_accounts_ids']:
                # Federated results need absolute links to their own domain.
                add_domains_to_permalinks(revisions.object_list)

            context = data.copy()
            context['has_federated_accounts'] = data['federated_accounts_ids'] != []
            context['request'] = request
            context['paginator'] = paginator
            context['revisions'] = revisions
            context['categories'] = Category.objects.get_for_home(
                language, accounts_ids)
            # id -> name lookup used by the template.
            context['categories_dict'] = dict(
                (str(cat['id']), cat['name']) for cat in context['categories'])

            return render_to_response(data['template_path'], context,
                                      context_instance=RequestContext(request))

    return redirect('/search/')
def __init__(self):
    """Connect to Elasticsearch and, if the index is created for the first
    time, install the mappings for built-in and plugin doc types."""
    # Establish the connection to the configured cluster.
    self.es = Elasticsearch(settings.SEARCH_INDEX["url"])
    es_conf = {
        "settings": {
            "analysis": {"analyzer": {"case_insensitive_sort": {"tokenizer": "keyword", "filter": ["lowercase"]}}}
        }
    }
    # Create the index if it does not exist yet;
    # ignore=400 tolerates the "index already exists" error.
    indices = self.es.indices.create(index=settings.SEARCH_INDEX["index"], body=es_conf, ignore=400)
    # First push of the index: install per-doc-type mappings.
    try:
        if indices["acknowledged"]:
            for doc_type in ["ds", "dt", "vz"]:
                self.es.indices.put_mapping(
                    index=settings.SEARCH_INDEX["index"], doc_type=doc_type, body=self.__get_mapping(doc_type)
                )
            for finder in DatalPluginPoint.get_active_with_att("finder"):
                self.es.indices.put_mapping(
                    index=settings.SEARCH_INDEX["index"],
                    doc_type=finder.doc_type,
                    body=self.__get_mapping(finder.doc_type),
                )
    # The index already existed: the error response has no 'acknowledged' key.
    except KeyError:
        pass
    self.logger = logging.getLogger(__name__)
def inplugin(value):
    """Return True when any active plugin exposes a truthy attribute *value*."""
    return any(
        plugin.is_active() and hasattr(plugin, value) and getattr(plugin, value)
        for plugin in DatalPluginPoint().get_plugins()
    )
def permalink(pk, obj_type):
    """Resolve the public microsite URL for a resource, or None if the
    object type is unknown to both built-ins and plugins."""
    # Built-in object types map to a (view name, url kwargs) pair.
    routes = {
        'dataset': ('manageDatasets.view', {'dataset_id': pk, 'slug': '-'}),
        'datastream': ('viewDataStream.view', {'id': pk, 'slug': '-'}),
        'visualization': ('chart_manager.view', {'id': pk, 'slug': '-'}),
    }
    if obj_type in routes:
        view_name, url_kwargs = routes[obj_type]
        return reverse(view_name, 'microsites.urls', kwargs=url_kwargs)

    # Otherwise ask any active plugin that provides permalinks.
    for plugin in DatalPluginPoint.get_active_with_att('permalink'):
        if plugin.doc_type == obj_type:
            return plugin.permalink(pk)
    return None
def search(request, category=None):
    """Public search view: run the query through FinderManager and render
    search/search.html.

    NOTE: the template context is this frame's locals() — do not rename or
    remove local variables without checking the template.

    Raises Http404 for an invalid form; InvalidPage when 'dt' is requested
    but datasets are disabled for the account.
    """
    account = request.account
    preferences = request.preferences
    form = forms.SearchForm(request.GET)
    if form.is_valid():
        query = form.get_query()
        page = form.cleaned_data.get('page')
        order = form.cleaned_data.get('order')
        reverse = form.cleaned_data.get('reverse')
        resource = form.cleaned_data.get('resource')
        all_resources = ["dt", "ds", "vz"]
        all_resources.extend([finder.doc_type for finder in DatalPluginPoint.get_active_with_att('finder')])
        # If no resource was given (or it is unknown), search all resources.
        if not resource or resource not in all_resources:
            # When the preference is off, exclude datasets ('dt').
            if not account.get_preference("account.dataset.show"):
                all_resources.remove("dt")
            resource=all_resources
        # Cannot search 'dt' when account.dataset.show is False.
        elif resource == "dt" and not account.get_preference("account.dataset.show"):
            raise InvalidPage
        # Default ordering when none was requested.
        if not order:
            order="_score:desc,title:asc,timestamp:desc"
        try:
            meta_data= json.loads(form.cleaned_data.get('meta_data'))
        except ValueError:
            meta_data=None
        # Own account plus all federated child accounts.
        accounts_ids = [x['id'] for x in account.account_set.values('id').all()] + [account.id]
        results, search_time, facets = FinderManager().search(
            query=query,
            account_id=accounts_ids,
            category_filters=category,
            order=order,
            resource=resource,
            reverse=reverse,
            meta_data=meta_data
        )
        paginator = Paginator(results, settings.PAGINATION_RESULTS_PER_PAGE)
        try:
            page_results = paginator.page(page).object_list
        except EmptyPage:
            page_results = []
        pref_search_tips = preferences['account_search_tips']
        if pref_search_tips:
            search_tips = json.loads(pref_search_tips)
        else:
            search_tips = {}
        # locals() IS the template context.
        return render_to_response('search/search.html', locals())
    else:
        raise Http404
def get_resources_types(self):
    """Return (DAO class, resource type) pairs for the built-in resources,
    followed by whatever active 'multiple_resources' plugins contribute."""
    pairs = [
        (DatasetDBDAO, settings.TYPE_DATASET),
        (DataStreamDBDAO, settings.TYPE_DATASTREAM),
        (VisualizationDBDAO, settings.TYPE_VISUALIZATION),
    ]
    for plugin in DatalPluginPoint.get_active_with_att('multiple_resources'):
        # NOTE(review): append adds the plugin's whole return value as ONE
        # element — confirm plugins return a single (DAO, type) pair.
        pairs.append(plugin.get_resources_types())
    return pairs
def plugins_call(context, method_name):
    """Concatenate the string output of *method_name* called on every
    active plugin that implements it."""
    chunks = []
    for plugin in DatalPluginPoint().get_plugins():
        if plugin.is_active() and hasattr(plugin, method_name):
            chunks.append(getattr(plugin, method_name)(context))
    return ''.join(chunks)
def create(self, account_id, user_id, revision_id, resource_type,
           resource_id, action_id, resource_title, resource_category):
    """Store one activity entry as a redis hash and push its key onto the
    account's activity-stream redis list.

    Returns (list_key, activity_key, activity_value).
    """
    if settings.DEBUG:
        logger.info('Create ActivityStreamDAO %d %s' % (action_id, resource_title))
    c = Cache(db=settings.CACHE_DATABASES['activity_resources'])
    # Localized "on %Y-%m-%d, at %H:%M" timestamp format.
    timeformat = "%s %s %s %s" % (ugettext('APP-ON-TEXT'), "%Y-%m-%d,",
                                  ugettext('APP-AT-TEXT'), "%H:%M")
    now = datetime.datetime.now()
    time = now.strftime(timeformat)
    l_permalink = ""
    #TODO check and fix al urls.
    # Deleted resources get no permalink.
    if int(action_id) != int(choices.ActionStreams.DELETE):
        if resource_type == settings.TYPE_DATASTREAM:
            l_permalink = reverse('manageDataviews.view',
                                  urlconf='workspace.urls',
                                  kwargs={'revision_id': revision_id})
        elif resource_type == settings.TYPE_VISUALIZATION:
            l_permalink = reverse('manageVisualizations.view',
                                  urlconf='workspace.urls',
                                  kwargs={'revision_id': revision_id})
        elif resource_type == settings.TYPE_DATASET:
            l_permalink = reverse('manageDatasets.view',
                                  urlconf='workspace.urls',
                                  kwargs={'revision_id': revision_id})
        else:
            # Plugin-provided resource types resolve their own permalink.
            for plugin in DatalPluginPoint.get_plugins():
                if (plugin.is_active() and
                        hasattr(plugin, 'doc_type') and
                        plugin.doc_type == resource_type and
                        hasattr(plugin, 'workspace_permalink')):
                    l_permalink = plugin.workspace_permalink(revision_id)
    list_key = 'activity_stream::%s' % str(account_id)
    # count any use of the list indexing hash and never repeat an ID
    n = c.incr("%s_counter" % list_key)
    activity_key = 'activity.stream_%s:%s' % (str(account_id), str(n))
    activity_value = {
        "user_id": user_id,
        "revision_id": revision_id,
        "type": resource_type,
        "resource_id": resource_id,
        "action_id": action_id,
        "title": resource_title,
        "time": time,
        "resource_link": l_permalink,
        "category": resource_category
    }
    r1 = c.hmset(activity_key, activity_value)
    r2 = c.lpush(str(list_key), activity_key)
    if settings.DEBUG:
        logger.info('Saved ActivityStreamDAO {} {} {} {} {}'.format(
            str(r1), str(r2), list_key, activity_key,
            activity_value))
    return list_key, activity_key, activity_value
def get_resources_types(self):
    """(DAO class, resource type) pairs: built-ins plus one entry per
    active 'multiple_resources' plugin."""
    base = [
        (DatasetDBDAO, settings.TYPE_DATASET),
        (DataStreamDBDAO, settings.TYPE_DATASTREAM),
        (VisualizationDBDAO, settings.TYPE_VISUALIZATION),
    ]
    # Each plugin result is appended as a single element (not flattened).
    base += [
        plugin.get_resources_types()
        for plugin in DatalPluginPoint.get_active_with_att('multiple_resources')
    ]
    return base
def __init__(self, **fields):
    """Select the implementation builder for fields['impl_type']."""
    # The default builder is always constructed first (kept as-is: its
    # constructor may have side effects the callers rely on).
    self.builder = DefaultImplBuilder(**fields)
    impl_type = int(fields['impl_type'])
    if impl_type == SourceImplementationChoices.REST:
        self.builder = RESTImplBuilder(**fields)
    elif impl_type == SourceImplementationChoices.SOAP:
        self.builder = SOAPImplBuilder(**fields)
    else:
        # Let an active plugin claim this impl_type.
        for candidate in DatalPluginPoint.get_active_with_att('impl_builder'):
            if impl_type == candidate.impl_type:
                self.builder = candidate(**fields)
def search(request, category=None):
    """Search view variant without a default ordering: run the query
    through FinderManager and render search/search.html.

    NOTE: the template context is this frame's locals() — do not rename or
    remove local variables without checking the template.

    Raises Http404 for an invalid form; InvalidPage when 'dt' is requested
    but datasets are disabled for the account.
    """
    account = request.account
    preferences = request.preferences
    form = forms.SearchForm(request.GET)
    if form.is_valid():
        query = form.get_query()
        page = form.cleaned_data.get('page')
        order = form.cleaned_data.get('order')
        reverse = form.cleaned_data.get('reverse')
        resource = form.cleaned_data.get('resource')
        all_resources = ["dt", "ds", "vz"]
        all_resources.extend([finder.doc_type for finder in DatalPluginPoint.get_active_with_att('finder')])
        # If no resource was given (or it is unknown), search all resources.
        if not resource or resource not in all_resources:
            # When the preference is off, exclude datasets ('dt').
            if not account.get_preference("account.dataset.show"):
                all_resources.remove("dt")
            resource=all_resources
        # Cannot search 'dt' when account.dataset.show is False.
        elif resource == "dt" and not account.get_preference("account.dataset.show"):
            raise InvalidPage
        try:
            meta_data= json.loads(form.cleaned_data.get('meta_data'))
        except ValueError:
            meta_data=None
        # Own account plus all federated child accounts.
        accounts_ids = [x['id'] for x in account.account_set.values('id').all()] + [account.id]
        results, search_time, facets = FinderManager().search(
            query=query,
            account_id=accounts_ids,
            category_filters=category,
            order=order,
            resource=resource,
            reverse=reverse,
            meta_data=meta_data
        )
        paginator = Paginator(results, settings.PAGINATION_RESULTS_PER_PAGE)
        try:
            page_results = paginator.page(page).object_list
        except EmptyPage:
            page_results = []
        pref_search_tips = preferences['account_search_tips']
        if pref_search_tips:
            search_tips = json.loads(pref_search_tips)
        else:
            search_tips = {}
        # locals() IS the template context.
        return render_to_response('search/search.html', locals())
    else:
        raise Http404
def get_id_name(self, r):
    """Map a resource doc_type code to its primary-key field name.

    Falls back to plugin finders for unknown types; returns None when no
    finder claims the type.
    """
    id_field_by_type = {
        'ds': "datastream_id",
        'vz': "visualization_id",
        'dt': "dataset_id",
    }
    if r in id_field_by_type:
        return id_field_by_type[r]
    # Plugin-provided finders declare their own id field name.
    for finder in DatalPluginPoint.get_active_with_att('finder'):
        if finder.doc_type == r:
            return finder.id_name
def __init__(self, **fields):
    """Pick the builder matching fields['impl_type']: default, REST, SOAP,
    or one supplied by an active plugin."""
    self.builder = DefaultImplBuilder(**fields)
    kind = int(fields['impl_type'])
    if kind == SourceImplementationChoices.REST:
        self.builder = RESTImplBuilder(**fields)
        return
    if kind == SourceImplementationChoices.SOAP:
        self.builder = SOAPImplBuilder(**fields)
        return
    # Neither REST nor SOAP: check plugin-provided builders.
    for impl_builder in DatalPluginPoint.get_active_with_att('impl_builder'):
        if kind == impl_builder.impl_type:
            self.builder = impl_builder(**fields)
def get_dictionary(self, doc):
    """Dispatch *doc* to the serializer matching doc['type'].

    Unknown types are offered to plugin finders; returns None when nothing
    claims the type.
    """
    method_by_type = {
        'ds': 'get_datastream_dictionary',
        'vz': 'get_visualization_dictionary',
        'dt': 'get_dataset_dictionary',
    }
    method_name = method_by_type.get(doc['type'])
    if method_name is not None:
        return getattr(self, method_name)(doc)
    # Plugin finders serialize their own doc types.
    for finder in DatalPluginPoint.get_active_with_att('finder'):
        if finder.doc_type == doc['type']:
            return finder.get_dictionary(doc)
def __get_mapping(self, doc_type):
    """Return the ES mapping body for *doc_type*: one of the built-in
    codes ('ds'/'dt'/'vz') or a plugin finder's doc type."""
    if doc_type == "ds":
        return self.__get_datastream_mapping()
    if doc_type == "dt":
        return self.__get_dataset_mapping()
    if doc_type == "vz":
        return self.__get_visualization_mapping()
    # Fall back to the active plugin finder that owns this doc_type.
    for finder in DatalPluginPoint.get_active_with_att("finder"):
        if finder.doc_type == doc_type:
            return finder.get_mapping()
def __get_mapping(self, doc_type):
    """Resolve the ES mapping body for a built-in or plugin doc type."""
    # Ordered dispatch over the built-in doc types.
    dispatch = (
        ("ds", self.__get_datastream_mapping),
        ("dt", self.__get_dataset_mapping),
        ("vz", self.__get_visualization_mapping),
    )
    for code, mapping_getter in dispatch:
        if doc_type == code:
            return mapping_getter()
    # Otherwise ask active plugin finders for their own mapping.
    for finder in DatalPluginPoint.get_active_with_att('finder'):
        if finder.doc_type == doc_type:
            return finder.get_mapping()
def create(self, request=None, *args, **kwargs):
    """Instantiate the dataset form matching self.collect_type.

    With request=None an unbound form is built from *args/**kwargs;
    otherwise a bound form is built from request.POST / request.FILES.

    Raises ValueError when no form (built-in or plugin) matches the
    collect type — previously this surfaced as an opaque NameError.
    """
    collect_type = int(self.collect_type)
    if collect_type == choices.CollectTypeChoices().WEBSERVICE:
        form = WebserviceForm
    elif collect_type == choices.CollectTypeChoices().SELF_PUBLISH:
        form = FileForm
    elif collect_type == choices.CollectTypeChoices().URL:
        form = URLForm
    else:
        form = None
        for plugin_form in DatalPluginPoint.get_active_with_att('dataset_form'):
            if collect_type == plugin_form.collect_type:
                form = plugin_form
        if form is None:
            raise ValueError(
                'No dataset form registered for collect_type %r' % self.collect_type)
    # FIX: replaced the broken `cond and a or b` idiom with explicit
    # branching (the and/or form misbehaves when the first form is falsy).
    if request is None:
        return form(*args, **kwargs)
    return form(request.POST, request.FILES, *args, **kwargs)
def get_data_types(self):
    """Return the list of data-type codes to operate on.

    Precedence: an explicit self.data_types attribute, then the
    'resources' query parameter (comma separated), then the class default
    plus plugin finder types.
    """
    if hasattr(self, 'data_types'):
        return self.data_types
    resources = self.request.query_params.get('resources', None)
    if resources:
        return resources.split(',')
    # FIX: copy the shared default list — the original appended plugin
    # types directly onto self._data_types, mutating it on every call.
    answer = list(self._data_types)
    for finder in DatalPluginPoint.get_active_with_att('finder'):
        answer.append(finder.doc_type)
    return answer
def load(request):
    """Show the microsite's home page, redirecting to /search/ when no
    custom home is configured or the theme cannot be parsed.

    :param request: HttpRequest carrying .account, .preferences and
        .auth_manager (set by project middleware).
    """
    # FIX: dropped the unused `jsonObject = None` dead assignment.
    language = request.auth_manager.language
    account = request.account
    preferences = request.preferences
    is_preview = request.GET.get('preview') == 'true'
    builder = ThemeBuilder(preferences, is_preview, language, request.user)

    # Searchable resource types: built-ins + plugin finders (+ datasets
    # when the account preference allows it).
    resources = ["ds", "vz"]
    resources.extend(
        finder.doc_type
        for finder in DatalPluginPoint.get_active_with_att('finder'))
    if account.get_preference("account.dataset.show"):
        resources.append("dt")

    if is_preview or preferences["account_home"]:
        # New-style home page.
        data = builder.parse()
        if data:
            accounts_ids = data['federated_accounts_ids'] + [account.id]
            queryset = FinderQuerySet(FinderManager(HomeFinder),
                                      max_results=250,
                                      account_id=accounts_ids,
                                      resource=resources)
            paginator = Paginator(queryset, 25)
            revisions = paginator.page(1)
            if data['federated_accounts_ids']:
                # Federated results need absolute permalinks.
                add_domains_to_permalinks(revisions.object_list)

            context = data.copy()
            context['has_federated_accounts'] = data['federated_accounts_ids'] != []
            context['request'] = request
            context['paginator'] = paginator
            context['revisions'] = revisions
            context['categories'] = Category.objects.get_for_home(language,
                                                                  accounts_ids)
            # id -> name lookup used by the template.
            context['categories_dict'] = dict(
                (str(cat['id']), cat['name']) for cat in context['categories'])

            return render_to_response(data['template_path'], context,
                                      context_instance=RequestContext(request))

    return redirect('/search/')
def get_mapping_dict(cls):
    """Map search field names to {resource_type: ORM lookup path}, merged
    with the mappings contributed by active 'multiple_resources' plugins.

    cls.resources holds (orm_prefix, resource_type) pairs; x[0] is the
    ORM prefix and x[1] the resource type in the lambdas below.
    """
    answer = {
        'title': dict(map(lambda x: (x[1], x[0] + 'i18n__title'), cls.resources)),
        'description': dict(
            map(lambda x: (x[1], x[0] + 'i18n__description'), cls.resources)),
        'user': dict(map(lambda x: (x[1], x[0] + '__user__name'), cls.resources)),
        'nick': dict(map(lambda x: (x[1], x[0] + '__user__nick'), cls.resources)),
        'category': {
            settings.TYPE_DATASET: 'category__categoryi18n__slug',
            settings.TYPE_DATASTREAM: 'category__categoryi18n__slug',
            # Visualizations reach the category through their datastream.
            settings.TYPE_VISUALIZATION:
            'visualization__datastream__last_revision__category__categoryi18n__slug',
        },
        'revision_id': dict(
            map(lambda x: (x[1], x[0] + '__last_revision_id'), cls.resources)),
        'published_revision_id': dict(
            map(lambda x: (x[1], x[0] + '__last_published_revision_id'),
                cls.resources)),
        'resource_id': dict(map(lambda x: (x[1], x[0] + '__id'), cls.resources)),
        'lib': {
            settings.TYPE_VISUALIZATION: 'lib'
        },
        'parameters': {
            settings.TYPE_DATASTREAM: 'parameters',
            settings.TYPE_VISUALIZATION: 'parameters'
        }
    }
    # Merge plugin mappings key by key without clobbering built-in entries.
    for multiple_resources in DatalPluginPoint.get_active_with_att(
            'multiple_resources'):
        for key, value in multiple_resources.get_mapping_dict().items():
            inner = answer.setdefault(key, {})
            inner.update(value)
    return answer
def create(self, account_id, user_id, revision_id, resource_type,
           resource_id, action_id, resource_title, resource_category):
    """Store one activity entry as a redis hash and push its key onto the
    account's activity-stream redis list.

    Returns (list_key, activity_key, activity_value).
    """
    if settings.DEBUG:
        logger.info('Create ActivityStreamDAO %d %s' % (action_id, resource_title))
    cache = Cache(db=settings.CACHE_DATABASES['activity_resources'])
    # Localized "on <date>, at <time>" timestamp.
    timeformat = "%s %s %s %s" % (ugettext('APP-ON-TEXT'), "%Y-%m-%d,",
                                  ugettext('APP-AT-TEXT'), "%H:%M")
    time = datetime.datetime.now().strftime(timeformat)

    #TODO check and fix al urls.
    l_permalink = ""
    # Deleted resources get no permalink.
    if int(action_id) != int(choices.ActionStreams.DELETE):
        workspace_views = {
            settings.TYPE_DATASTREAM: 'manageDataviews.view',
            settings.TYPE_VISUALIZATION: 'manageVisualizations.view',
            settings.TYPE_DATASET: 'manageDatasets.view',
        }
        if resource_type in workspace_views:
            l_permalink = reverse(workspace_views[resource_type],
                                  urlconf='workspace.urls',
                                  kwargs={'revision_id': revision_id})
        else:
            # Plugin-provided resource types resolve their own permalink.
            for plugin in DatalPluginPoint.get_plugins():
                if (plugin.is_active() and hasattr(plugin, 'doc_type')
                        and plugin.doc_type == resource_type
                        and hasattr(plugin, 'workspace_permalink')):
                    l_permalink = plugin.workspace_permalink(revision_id)

    list_key = 'activity_stream::%s' % str(account_id)
    # The counter guarantees a unique, never-repeated hash id.
    n = cache.incr("%s_counter" % list_key)
    activity_key = 'activity.stream_%s:%s' % (str(account_id), str(n))
    activity_value = {
        "user_id": user_id,
        "revision_id": revision_id,
        "type": resource_type,
        "resource_id": resource_id,
        "action_id": action_id,
        "title": resource_title,
        "time": time,
        "resource_link": l_permalink,
        "category": resource_category,
    }
    r1 = cache.hmset(activity_key, activity_value)
    r2 = cache.lpush(str(list_key), activity_key)
    if settings.DEBUG:
        logger.info('Saved ActivityStreamDAO {} {} {} {} {}'.format(
            str(r1), str(r2), list_key, activity_key,
            activity_value))
    return list_key, activity_key, activity_value
def __init__(self):
    """Connect to Elasticsearch and, when the index is created for the
    first time, install mappings for built-in and plugin doc types."""
    self.es = Elasticsearch(settings.SEARCH_INDEX['url'])
    es_conf = {
        "settings": {
            "analysis": {
                "analyzer": {
                    "case_insensitive_sort": {
                        "tokenizer": "keyword",
                        "filter": ["lowercase"],
                    }
                }
            }
        }
    }
    index_name = settings.SEARCH_INDEX['index']
    # Create the index if missing; ignore=400 tolerates "already exists".
    indices = self.es.indices.create(index=index_name,
                                     body=es_conf,
                                     ignore=400)
    try:
        if indices['acknowledged']:
            # Built-in doc types first, then plugin finder types.
            doc_types = ["ds", "dt", "vz"]
            doc_types += [
                finder.doc_type
                for finder in DatalPluginPoint.get_active_with_att('finder')
            ]
            for doc_type in doc_types:
                self.es.indices.put_mapping(index=index_name,
                                            doc_type=doc_type,
                                            body=self.__get_mapping(doc_type))
    except KeyError:
        # No 'acknowledged' key: the index already existed.
        pass
    self.logger = logging.getLogger(__name__)
def update_list(request):
    """AJAX endpoint rendering loadHome/table.json with the resource
    revisions that match the posted QueryDatasetForm.

    NOTE: the template context is Context(locals()) — renaming or removing
    any local variable here changes what the template sees.
    """
    account = request.account
    auth_manager = request.auth_manager
    preferences = account.get_preferences()
    language = request.auth_manager.language
    resources = ["ds", "vz"]
    resources.extend([finder.doc_type for finder in DatalPluginPoint.get_active_with_att('finder')])
    if account.get_preference("account.dataset.show"):
        resources.append("dt")
    form = QueryDatasetForm(request.POST)
    if form.is_valid():
        query = form.cleaned_data.get('search')
        page = form.cleaned_data.get('page')
        order = form.cleaned_data.get('order')
        # Map the posted order code to an elastic sort field.
        order_elastic = None
        if order == "0":
            order_elastic = "title"
        elif order == "1":
            order_elastic = "last"
        order_type = form.cleaned_data.get('order_type')
        reverse = order_type.lower() == 'ascending'
        category_filters = form.cleaned_data.get('category_filters')
        if category_filters:
            category_filters=category_filters.split(",")
        builder = ThemeBuilder(preferences, False, language, request.user)
        data = builder.parse()
        if data['federated_accounts_ids']:
            # Federated microsite: optionally narrow to a single entity.
            entity = form.cleaned_data.get('entity_filters')
            if entity:
                accounts_ids = [int(entity)]
            else:
                accounts_ids = data['federated_accounts_ids'] + [account.id]
            typef = form.cleaned_data.get('type_filters')
            if typef:
                if typef in resources:
                    resources = [typef]
            queryset = FinderQuerySet(FinderManager(HomeFinder),
                                      query = query,
                                      max_results = 250,
                                      account_id = accounts_ids,
                                      resource = resources,
                                      category_filters=category_filters,
                                      order = order_elastic,
                                      reverse = reverse
                                      )
        else:
            all_resources = form.cleaned_data.get('all')
            if not all_resources:
                # Keep only the requested types that are actually enabled.
                resources_type = form.cleaned_data.get('type')
                aux = []
                for resource_name in resources_type.split(','):
                    if resource_name in resources:
                        aux.append(resource_name)
                resources=aux
            queryset = FinderQuerySet(FinderManager(HomeFinder),
                                      category_filters= category_filters,
                                      query=query,
                                      resource=resources,
                                      max_results=250,
                                      order=order_elastic,
                                      reverse = reverse,
                                      account_id=account.id
                                      )
        paginator = Paginator(queryset, 25)
        revisions = paginator.page(page and page or 1)
        if data['federated_accounts_ids']:
            add_domains_to_permalinks(revisions.object_list)
        error = ''
        results = revisions.object_list
    else:
        error = 'Invalid data'
        results=[]
        categories=[]
    t = loader.get_template('loadHome/table.json')
    # locals() IS the template context.
    c = Context(locals())
    return HttpResponse(t.render(c), content_type="application/json")
def __init__(self, url=None):
    """Connect to Elasticsearch — optionally overriding the configured
    url — and create the index with stop-word/stemmer analyzers if it
    does not exist yet.

    :param url: optional cluster url; when given it takes precedence over
        settings.SEARCH_INDEX['url'].
    """
    # Establish the connection.
    if url:
        print "[WARNING] ignorando config %s, usamos url %s" % (settings.SEARCH_INDEX['url'], url)
        self.es = Elasticsearch(url)
    else:
        self.es = Elasticsearch(settings.SEARCH_INDEX['url'])
    es_conf= {
        "settings": {
            "analysis": {
                "filter": {
                    "english_stop": {
                        "type": "stop",
                        "stopwords": "_english_"
                    },
                    "light_english_stemmer": {
                        "type": "stemmer",
                        "language": "light_english"
                    },
                    "english_possessive_stemmer": {
                        "type": "stemmer",
                        "language": "english"
                    },
                    "light_spanish_stemmer": {
                        "type": "stemmer",
                        "language": "light_spanish"
                    },
                    "spanish_possessive_stemmer": {
                        "type": "stemmer",
                        "language": "spanish"
                    }
                },
                "analyzer": {
                    # Lowercased keyword analyzer used for sorting.
                    "case_insensitive_sort": {
                        "tokenizer": "keyword",
                        "filter": [ "lowercase" ]
                    },
                    "english": {
                        "tokenizer": "standard",
                        "filter": [
                            "english_possessive_stemmer",
                            "lowercase",
                            "english_stop",
                            "light_english_stemmer",
                            "asciifolding"
                        ]
                    },
                    "spanish": {
                        "tokenizer": "standard",
                        "filter": [
                            "spanish_possessive_stemmer",
                            "lowercase",
                            "light_spanish_stemmer",
                        ]
                    }
                }
            }
        }
    }
    # Create the index if it does not exist yet;
    # ignore=400 tolerates "index already exists".
    indices = self.es.indices.create(index=settings.SEARCH_INDEX['index'], body=es_conf, ignore=400)
    # First push of the index: install per-doc-type mappings.
    try:
        if indices['acknowledged']:
            for doc_type in ["ds","dt","vz"]:
                self.es.indices.put_mapping(index=settings.SEARCH_INDEX['index'], doc_type=doc_type, body=self.__get_mapping(doc_type))
            for finder in DatalPluginPoint.get_active_with_att('finder'):
                self.es.indices.put_mapping(index=settings.SEARCH_INDEX['index'], doc_type=finder.doc_type, body=self.__get_mapping(finder.doc_type))
    # The index already existed: no 'acknowledged' key in the response.
    except KeyError:
        pass
    self.logger = logging.getLogger(__name__)
from workspace.rest.users import RestUserViewSet

# REST API routers for the workspace application.
router = routers.DefaultRouter()
router.register(r'datastreams', RestDataStreamViewSet, base_name='datastreams')
router.register(r'maps', RestMapViewSet, base_name='maps')
router.register(r'charts', RestChartViewSet, base_name='charts')
router.register(r'datasets', RestDataSetViewSet, base_name='datasets')
router.register(r'sources', RestSourceViewSet, base_name='sources')
router.register(r'tags', RestTagViewSet, base_name='tags')
router.register(r'resources', MultipleResourceViewSet, base_name='resources')
router.register(r'categories', RestCategoryViewSet, base_name='categories')
router.register(r'users', RestUserViewSet, base_name='users')

# Register any extra routers contributed by active plugins; each entry of
# workspace_routers is a (prefix, viewset, base_name) triple.
plugins = DatalPluginPoint.get_plugins()
for plugin in plugins:
    if plugin.is_active() and hasattr(plugin, 'workspace_routers'):
        for router_list in plugin.workspace_routers:
            router.register(router_list[0], router_list[1],
                            base_name=router_list[2])


def jsi18n(request, packages=None, domain=None):
    """Serve the JavaScript translation catalog (default domain 'djangojs')."""
    if not domain:
        domain = 'djangojs'
    return javascript_catalog(request, domain, packages)


js_info_dict = {
    'domain': 'djangojs',
    # FIX: ('workspace') is just the string 'workspace' — a one-element
    # tuple needs the trailing comma.
    'packages': ('workspace',),
}
def __build_query(self):
    """Build the Elasticsearch filtered-query body from the finder state
    (query string, resource types, account ids, category/meta filters).

    Returns the dict body for the search request.
    """
    if settings.DEBUG:
        logger.info("El query es: %s" % self.query)

    # Wildcard: '%' (or empty) means match everything.
    if self.query in ("%", ""):
        self.query = "*"

    # '+' acts as an explicit AND between terms.
    self.query = self.query.replace("+", " AND ")

    # Decide which resource types to filter on.
    if self.resource == "all":
        self.resource = ["ds", "dt", "vz"]
        self.resource.extend([
            finder.doc_type
            for finder in DatalPluginPoint.get_active_with_att('finder')
        ])

    # FIX: isinstance instead of type(...) comparisons; a bare string
    # becomes a one-element list.
    if not isinstance(self.resource, (list, tuple)):
        self.resource = [self.resource]

    # Federated searches pass a list of account ids; normalize a scalar id
    # (FIX: also accept unicode, which the old type(str()) check missed).
    if isinstance(self.account_id, (str, unicode, int, long, float)):
        account_ids = [int(self.account_id)]
    elif isinstance(self.account_id, (list, tuple)):
        account_ids = self.account_id
    else:
        # TODO(review): should this raise instead of passing through?
        account_ids = self.account_id

    filters = [
        {"terms": {"account_id": account_ids}},
        {"terms": {"type": self.resource}}
    ]

    if self.category_filters:
        # A bare string category filter also becomes a one-element list.
        if not isinstance(self.category_filters, (list, tuple)):
            self.category_filters = [self.category_filters]
        filters.append({"terms": {"categories.name": self.category_filters}})

    if self.ids:
        # Only works when exactly ONE resource type is passed as param.
        filters.append(
            {"terms": {"resource_id": filter(None, self.ids.split(","))}})

    if self.meta_data:
        # Single {key: value} pair; the value is normalized to a list.
        key = self.meta_data.keys()[0]
        value = self.meta_data.values()[0]
        if not isinstance(value, (list, tuple)):
            value = [value]
        filters.append({"terms": {key: value}})

    query = {
        "query": {
            "filtered": {
                "query": {
                    "query_string": {
                        "query": self.query,
                        "fields": ["title", "text",
                                   "text_english_stemmer",
                                   "text_spanish_stemmer"]
                    }
                },
                "filter": {"bool": {"must": filters}}
            }
        },
        "facets": {
            "type": {"terms": {"field": "categories.name"}}
        }
    }
    return query
from workspace.rest.categories import RestCategoryViewSet
from workspace.rest.users import RestUserViewSet

# REST API routers for the workspace application.
router = routers.DefaultRouter()
router.register(r'datastreams', RestDataStreamViewSet, base_name='datastreams')
router.register(r'maps', RestMapViewSet, base_name='maps')
router.register(r'charts', RestChartViewSet, base_name='charts')
router.register(r'datasets', RestDataSetViewSet, base_name='datasets')
router.register(r'sources', RestSourceViewSet, base_name='sources')
router.register(r'tags', RestTagViewSet, base_name='tags')
router.register(r'resources', MultipleResourceViewSet, base_name='resources')
router.register(r'categories', RestCategoryViewSet, base_name='categories')
router.register(r'users', RestUserViewSet, base_name='users')

# Register any extra routers contributed by active plugins; each entry of
# workspace_routers is a (prefix, viewset, base_name) triple.
plugins = DatalPluginPoint.get_plugins()
for plugin in plugins:
    if plugin.is_active() and hasattr(plugin, 'workspace_routers'):
        for router_list in plugin.workspace_routers:
            router.register(router_list[0], router_list[1], base_name=router_list[2])


def jsi18n(request, packages=None, domain=None):
    """Serve the JavaScript translation catalog (default domain 'djangojs')."""
    if not domain:
        domain = 'djangojs'
    return javascript_catalog(request, domain, packages)


# NOTE(review): this chunk ends mid-definition; the dict body continues
# beyond the visible source.
js_info_dict = {
def handle(self, *args, **options):
    """Management command: rebuild the search index from every published
    dataset, visualization and datastream, then reindex plugin resources."""
    # index resources
    if options['reindex']:
        # Destroy and recreate the index from scratch.
        ElasticsearchIndex().flush_index()
        es = ElasticsearchIndex()

        for dataset in Dataset.objects.filter(
                last_published_revision__status=StatusChoices.PUBLISHED):
            datasetrevision = dataset.last_published_revision
            search_dao = DatasetSearchDAOFactory().create(datasetrevision)
            search_dao.add()

        for vz in Visualization.objects.filter(
                last_published_revision__status=StatusChoices.PUBLISHED):
            vz_revision = vz.last_published_revision
            search_dao = VisualizationSearchDAOFactory().create(vz_revision)
            search_dao.add()
            h = VisualizationHitsDAO(vz_revision)
            doc = {
                'docid': "VZ::%s" % vz.guid,
                "type": "vz",
                "doc": {
                    "fields": {
                        "hits": h.count(),
                        "web_hits": h.count(channel_type=0),
                        "api_hits": h.count(channel_type=1)
                    }
                }
            }
            # Best-effort hit-count update. FIX: narrowed the bare
            # `except:` so SystemExit/KeyboardInterrupt are not swallowed.
            try:
                es.update(doc)
            except Exception:
                pass

        # TODO Hay que usar el metodo query del DAO
        for datastream in DataStream.objects.filter(
                last_published_revision__status=StatusChoices.PUBLISHED):
            datastreamrevision = datastream.last_published_revision
            datastream_rev = DataStreamDBDAO().get(
                datastreamrevision.user.language,
                datastream_revision_id=datastreamrevision.id,
                published=True
            )
            search_dao = DatastreamSearchDAOFactory().create(datastreamrevision)
            search_dao.add()
            h = DatastreamHitsDAO(datastream_rev)
            doc = {
                'docid': "DS::%s" % datastreamrevision.datastream.guid,
                "type": "ds",
                "doc": {
                    "fields": {
                        "hits": h.count(),
                        "web_hits": h.count(channel_type=0),
                        "api_hits": h.count(channel_type=1)
                    }
                }
            }
            # Best-effort hit-count update (see FIX above).
            try:
                es.update(doc)
            except Exception:
                pass

        for plugin in DatalPluginPoint.get_active_with_att('reindex'):
            plugin.reindex(es)
def __build_query(self):
    """Build the Elasticsearch filtered-query body (wildcard-wrapped query
    string plus account/type/category/id filters)."""
    logger.info("El query es: %s" % self.query)

    # Decide which resource types to filter on.
    if self.resource == "all":
        self.resource = ["ds", "dt", "vz"]
        self.resource.extend([
            finder.doc_type
            for finder in DatalPluginPoint.get_active_with_att('finder')
        ])

    # FIX: the old isinstance(self.resource, str) missed py2 unicode
    # values; normalize anything that is not already a list/tuple.
    if not isinstance(self.resource, (list, tuple)):
        self.resource = [self.resource]

    # Federated searches pass a list of account ids; normalize a scalar id
    # (FIX: isinstance instead of type(...) comparisons, unicode included).
    if isinstance(self.account_id, (str, unicode, int, long, float)):
        account_ids = [int(self.account_id)]
    elif isinstance(self.account_id, (list, tuple)):
        account_ids = self.account_id
    else:
        # TODO(review): should this raise instead of passing through?
        account_ids = self.account_id

    filters = [
        {"terms": {"account_id": account_ids}},
        {"terms": {"type": self.resource}}
    ]

    if self.category_filters:
        filters.append({"terms": {"categories.name": self.category_filters}})

    if self.ids:
        # Only works when exactly ONE resource type is passed as param.
        filters.append(
            {"terms": {"resource_id": filter(None, self.ids.split(","))}})

    # FIX: leftover debug `print` statements replaced with logger calls.
    logger.debug("resources: %s", self.resource)
    logger.debug("filters: %s", filters)

    query = {
        "query": {
            "filtered": {
                "query": {
                    "query_string": {
                        "query": "*%s*" % self.query,
                        "fields": ["title", "text"]
                    }
                },
                "filter": {"bool": {"must": filters}}
            }
        },
        "facets": {
            "type": {"terms": {"field": "categories.name"}}
        }
    }
    return query
def update_list(request):
    """AJAX endpoint rendering loadHome/table.json with the resource
    revisions that match the posted QueryDatasetForm.

    NOTE: the template context is Context(locals()) — renaming or removing
    any local variable here changes what the template sees.
    """
    account = request.account
    auth_manager = request.auth_manager
    preferences = account.get_preferences()
    language = request.auth_manager.language
    resources = ["ds", "vz"]
    resources.extend([
        finder.doc_type
        for finder in DatalPluginPoint.get_active_with_att('finder')
    ])
    if account.get_preference("account.dataset.show"):
        resources.append("dt")
    form = QueryDatasetForm(request.POST)
    if form.is_valid():
        query = form.cleaned_data.get('search')
        page = form.cleaned_data.get('page')
        order = form.cleaned_data.get('order')
        # Map the posted order code to an elastic sort field.
        order_elastic = None
        if order == "0":
            order_elastic = "title"
        elif order == "1":
            order_elastic = "last"
        order_type = form.cleaned_data.get('order_type')
        reverse = order_type.lower() == 'ascending'
        category_filters = form.cleaned_data.get('category_filters')
        if category_filters:
            category_filters = category_filters.split(",")
        builder = ThemeBuilder(preferences, False, language, request.user)
        data = builder.parse()
        if data['federated_accounts_ids']:
            # Federated microsite: optionally narrow to a single entity.
            entity = form.cleaned_data.get('entity_filters')
            if entity:
                accounts_ids = [int(entity)]
            else:
                accounts_ids = data['federated_accounts_ids'] + [account.id]
            typef = form.cleaned_data.get('type_filters')
            if typef:
                if typef in resources:
                    resources = [typef]
            queryset = FinderQuerySet(FinderManager(HomeFinder),
                                      query=query,
                                      max_results=250,
                                      account_id=accounts_ids,
                                      resource=resources,
                                      category_filters=category_filters,
                                      order=order_elastic,
                                      reverse=reverse)
        else:
            all_resources = form.cleaned_data.get('all')
            if not all_resources:
                # Keep only the requested types that are actually enabled.
                resources_type = form.cleaned_data.get('type')
                aux = []
                for resource_name in resources_type.split(','):
                    if resource_name in resources:
                        aux.append(resource_name)
                resources = aux
            queryset = FinderQuerySet(FinderManager(HomeFinder),
                                      category_filters=category_filters,
                                      query=query,
                                      resource=resources,
                                      max_results=250,
                                      order=order_elastic,
                                      reverse=reverse,
                                      account_id=account.id)
        paginator = Paginator(queryset, 25)
        revisions = paginator.page(page and page or 1)
        if data['federated_accounts_ids']:
            add_domains_to_permalinks(revisions.object_list)
        error = ''
        results = revisions.object_list
    else:
        error = 'Invalid data'
        results = []
        categories = []
    t = loader.get_template('loadHome/table.json')
    # locals() IS the template context.
    c = Context(locals())
    return HttpResponse(t.render(c), content_type="application/json")
def index_dashboards(self): if self.options['dashboards']: if self.options['debug']: print "[Iniciando dashboards]" for plugin in DatalPluginPoint.get_active_with_att('reindex'): plugin.reindex(self.es)
def __build_query(self):
    """Build the Elasticsearch filtered-query body (wildcard-wrapped query
    string plus account/type/category/id filters)."""
    logger.info("El query es: %s" % self.query)

    # Decide which resource types to filter on.
    if self.resource == "all":
        self.resource = ["ds", "dt", "vz"]
        self.resource.extend([
            finder.doc_type
            for finder in DatalPluginPoint.get_active_with_att('finder')
        ])

    # FIX: isinstance(self.resource, str) missed py2 unicode values;
    # normalize anything that is not already a list/tuple.
    if not isinstance(self.resource, (list, tuple)):
        self.resource = [self.resource]

    # Federated searches pass a list of account ids; normalize a scalar id
    # (FIX: isinstance instead of type(...) comparisons, unicode included).
    if isinstance(self.account_id, (str, unicode, int, long, float)):
        account_ids = [int(self.account_id)]
    elif isinstance(self.account_id, (list, tuple)):
        account_ids = self.account_id
    else:
        # TODO(review): should this raise instead of passing through?
        account_ids = self.account_id

    filters = [{
        "terms": {
            "account_id": account_ids
        }
    }, {
        "terms": {
            "type": self.resource
        }
    }]

    if self.category_filters:
        filters.append(
            {"terms": {
                "categories.name": self.category_filters
            }})

    if self.ids:
        # Only works when exactly ONE resource type is passed as param.
        filters.append(
            {"terms": {
                "resource_id": filter(None, self.ids.split(","))
            }})

    # FIX: leftover debug `print` statements replaced with logger calls.
    logger.debug("resources: %s", self.resource)
    logger.debug("filters: %s", filters)

    query = {
        "query": {
            "filtered": {
                "query": {
                    "query_string": {
                        "query": "*%s*" % self.query,
                        "fields": ["title", "text"]
                    }
                },
                "filter": {
                    "bool": {
                        "must": filters
                    }
                }
            }
        },
        "facets": {
            "type": {
                "terms": {
                    "field": "categories.name"
                }
            }
        }
    }
    return query