def __call__(self, batch=False, b_size=20, b_start=0, orphan=0, **kw):
    """Return all Product rows wrapped as a content listing, sliced to
    the requested batch page.

    :param batch: accepted for caller compatibility; not used here.
    :param b_size: number of items per batch page.
    :param b_start: index of the first item of the requested page.
    :param orphan: accepted for caller compatibility; not used here.
    """
    results = getSession().query(Product).all()
    # Bug fix: the slice end must be b_start + b_size. The original
    # [b_start:b_size] returned an empty page for any b_start >= b_size,
    # so every page after the first was empty.
    return IContentListing(results)[b_start:b_start + b_size]
def __call__(self, **kw):
    """Run a portal_catalog search built from the keyword arguments and
    return the results wrapped as a content listing.
    """
    catalog = getToolByName(self.context, 'portal_catalog')
    search_query = dict(**kw)
    brains = catalog(search_query)
    return IContentListing(brains)
def test_listing_object_interface(self):
    # Each item of a content listing must fully provide
    # IContentListingObject.
    first_item = IContentListing(self.catalog())[0]
    self.assertTrue(verifyObject(IContentListingObject, first_item))
def crumbs(item):
    # Wrap the single item so the breadcrumbs view receives a
    # content-listing object rather than a raw item.
    listing_obj = IContentListing([item])[0]
    return view.breadcrumbs(listing_obj)
def get_questions(self):
    """Return the contained values the current user may 'View',
    wrapped as a content listing.
    """
    check = getSecurityManager().checkPermission
    visible = [item for item in self.values() if check('View', item)]
    return IContentListing(visible)
def test_making_contentlisting(self):
    # Adapting catalog results must yield a ContentListing instance.
    from plone.app.contentlisting.contentlisting import ContentListing
    brains = self.catalog()
    wrapped = IContentListing(brains)
    self.assertTrue(isinstance(wrapped, ContentListing))
def manually_generated_excerpts(self):
    """Resolve each of the model's excerpt documents and return them
    as a content listing.
    """
    resolved = []
    for excerpt in self.model.excerpt_documents:
        resolved.append(excerpt.resolve_document())
    return IContentListing(resolved)
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False,
               custom_query=None):
    """Parse the (form)query and return using multi-adapter.

    :param query: the raw (form) query to parse.
    :param batch: when True, batch the results with ``Batch``.
    :param b_start: batch start index.
    :param b_size: batch page size.
    :param sort_on: catalog index to sort on.
    :param sort_order: sort direction.
    :param limit: cap on the reported result count (when not batching).
    :param brains: when True, return raw brains instead of a listing.
    :param custom_query: optional dict of extra index -> value entries
        merged over the parsed query.
    """
    # Bug fix: ``custom_query`` previously used the mutable default
    # ``{}``, which is shared across calls; use the None sentinel
    # instead (backward compatible -- the dict was never mutated, but
    # the shared-default idiom is unsafe and the sibling _makequery
    # implementations already use None).
    if custom_query is None:
        custom_query = {}
    parsedquery = queryparser.parseFormquery(
        self.context, query, sort_on, sort_order)

    # Let registered utilities rewrite individual index queries.
    index_modifiers = getUtilitiesFor(IParsedQueryIndexModifier)
    for name, modifier in index_modifiers:
        if name in parsedquery:
            new_name, query = modifier(parsedquery[name])
            parsedquery[name] = query
            # if a new index name has been returned, we need to replace
            # the native ones
            if name != new_name:
                del parsedquery[name]
                parsedquery[new_name] = query

    # Check for valid indexes
    catalog = getToolByName(self.context, 'portal_catalog')
    valid_indexes = [
        index for index in parsedquery
        if index in catalog.indexes()
    ]

    # We'll ignore any invalid index, but will return an empty set if none
    # of the indexes are valid.
    if not valid_indexes:
        logger.warning(
            "Using empty query because there are no valid indexes used.")
        parsedquery = {}

    if not parsedquery:
        if brains:
            return []
        else:
            return IContentListing([])

    if batch:
        parsedquery['b_start'] = b_start
        parsedquery['b_size'] = b_size
    elif limit:
        parsedquery['sort_limit'] = limit

    if 'path' not in parsedquery:
        parsedquery['path'] = {'query': ''}

    if isinstance(custom_query, dict):
        # Update the parsed query with an extra query dictionary. This may
        # override the parsed query. The custom_query is a dictonary of
        # index names and their associated query values.
        parsedquery.update(custom_query)

    results = catalog(**parsedquery)
    # Clamp the reported count to ``limit`` so batching UIs do not
    # advertise more results than the sort_limit allows.
    if getattr(results, 'actual_result_count', False) and limit \
            and results.actual_result_count > limit:
        results.actual_result_count = limit

    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def table_data(self):
    """Build the DataTables JSON payload for the Tumour listing.

    Reads the DataTables request parameters (paging, ordering, search
    text) from the request form, runs a catalog search scoped to the
    current context, applies per-field/date filtering, and returns a
    JSON string with DataTables row data plus per-row permissions.
    """
    indexes = api.portal.get_tool('portal_catalog').indexes()
    # DataTables paging parameters.
    b_size = int(self.request.form.get('length', 10))
    b_start = int(self.request.form.get('start', 0))
    # Resolve the DataTables column index to a column name, then only
    # accept it if it is a real catalog index; fall back to 'created'.
    sort_on = self.request.get('order[0][column]', 0)
    sort_on = self.request.get('columns[{0}][data]'.format(sort_on),
                               'created')
    sort_on = sort_on in indexes and sort_on or 'created'
    sort_order = self.request.get('order[0][dir]', 'desc')
    sort_order = sort_order == 'asc' and 'ascending' or 'descending'
    sort_limit = self.request.get('sort_limit', 300)
    searchable_text = self.request.form.get('search[value]', '')
    searchable_text = safe_unicode(searchable_text)
    search_field = self.request.get('search_field', '')
    search_date = self.request.get('search_date', '')
    date_from = self.request.get('date_from', '')
    date_to = self.request.get('date_to', '')
    # Base catalog query: Tumour objects below the current context.
    query = dict()
    query['b_size'] = b_size
    query['b_start'] = b_start
    query['sort_on'] = sort_on
    query['sort_order'] = sort_order
    query['sort_limit'] = sort_limit
    query['portal_type'] = 'Tumour'
    query['path'] = {
        'query': '/'.join(self.context.getPhysicalPath()),
        'depth': -1
    }
    # Exact-field search for a few known fields, otherwise full text.
    if search_field in ('task_no', 'sequencing_filename'):
        query[search_field] = searchable_text
    else:
        query['SearchableText'] = searchable_text
    if search_field in utils.progress_steps:
        query['steps'] = search_field
    elif search_field in utils.review_states:
        query['review_state'] = search_field
    else:
        search_words = searchable_text.lower().split()
        # NOTE(review): this compares a list to an int -- under
        # Python 2 it is always True for non-empty comparisons;
        # probably meant ``len(search_words) > 1``. Confirm intent.
        if search_words > 1:
            operators = ('and', 'or', 'not', '(', ')')
            # Only rewrite to an OR query when the user did not type
            # explicit boolean operators themselves.
            if not any(map(lambda val: val in search_words, operators)):
                searchable_text = u' OR '.join(search_words)
                query['SearchableText'] = searchable_text
    # Optional date-range filter on one of the known date indexes;
    # bad/missing dates fall back to epoch / now.
    if search_date in ('sampling_time', 'received_time',
                       'separation_time', 'extraction_time',
                       'library_time', 'template_time',
                       'sequencing_time', 'created', 'modified'):
        try:
            start_date = DateTime(date_from)
        except:  # NOTE(review): bare except -- narrow if possible
            start_date = DateTime('1970-01-01')
        try:
            end_date = DateTime(date_to)
        except:  # NOTE(review): bare except -- narrow if possible
            end_date = DateTime()
        query[search_date] = {
            'query': sorted([start_date, end_date]),
            'range': 'min:max'
        }
    # Registry-driven extra search text for members of certain groups.
    try:
        user_search_filter = api.portal.get_registry_record(
            'gene.tumour.interfaces.IGeneTumourSettings.'
            'user_search_filter')
    except Exception as e:
        user_search_filter = []
        logger.warn(e)
    if not isinstance(user_search_filter, (list, tuple)):
        user_search_filter = []
    current_user = api.user.get_current()
    for group in user_search_filter:
        users = api.user.get_users(groupname=group)
        if current_user in users:
            searchable_text += ' '
            # NOTE(review): ``user_search_filter`` is a list/tuple here
            # but is indexed by ``group`` (a string) -- this would raise
            # TypeError; presumably a mapping was intended. Confirm.
            searchable_text += safe_unicode(user_search_filter[group])
    results = api.content.find(**query)
    results = IContentListing(results)
    results = Batch(results, size=b_size, start=b_start)
    # Permissions computed once per request, reused for every row.
    can_review = api.user.has_permission('Review portal content',
                                         user=current_user,
                                         obj=self.context)
    can_changenote = api.user.has_permission('gene.tumour: Change Note',
                                             user=current_user,
                                             obj=self.context)
    rows = []
    for item in results:
        obj = item.getObject()
        record = {}
        # Copy schema fields; blob fields become [display, download] URLs.
        for name, value in utils.fields():
            if value.__class__.__name__ in ('NamedBlobImage',
                                            'NamedBlobFile'):
                record[name] = [
                    self.display_url(obj, name),
                    self.download_url(obj, name)
                ]
            else:
                record[name] = getattr(obj, name, None)
        record['created'] = api.portal.get_localized_time(
            obj.created(), long_format=True)
        record['modified'] = api.portal.get_localized_time(
            obj.modified(), long_format=True)
        record['report'] = getattr(obj.aq_explicit, 'report', None)
        record['url'] = obj.absolute_url_path()
        state = api.content.get_state(obj)
        record['review_state'] = translate(
            _(state.title()), context=api.portal.getRequest())
        # Translate coded values to display labels where known.
        if record['result'] in result_dict:
            record['result'] = translate(
                _(result_dict[record['result']]),
                context=api.portal.getRequest())
        if record['treatment_situation']:
            record['treatment_situation'] = u','.join([
                translate(_(situation_dict[item]),
                          context=api.portal.getRequest())
                for item in record['treatment_situation']
                if item in situation_dict
            ])
        record['can_versions'] = can_review
        record['can_changenote'] = can_changenote
        # DataTables row metadata (DT_* keys are consumed client-side).
        record['DT_RowId'] = obj.UID()
        record['DT_RowClass'] = '{0} {1}'.format(obj.steps, state)
        record['DT_RowData'] = dict()
        record['DT_RowData']['id'] = obj.id
        record['DT_RowData']['uuid'] = obj.UID()
        record['DT_RowData']['gid'] = obj.gid
        record['DT_RowData']['steps'] = obj.steps
        record['DT_RowData']['url'] = obj.absolute_url()
        record['DT_RowData']['result'] = getattr(obj, 'result', None)
        record['DT_RowData']['library_barcode'] = getattr(
            obj, 'library_barcode', None)
        record['steps'] = translate(_(obj.steps), context=self.request)
        # NOTE(review): this branch is a no-op -- dead code?
        if not can_review:
            pass
        rows.append(record)
    table_data = dict()
    table_data['draw'] = int(self.request.form.get('draw', 1))
    table_data['recordsTotal'] = len(results)
    table_data['recordsFiltered'] = len(results)
    table_data['data'] = rows
    self.request.response.setHeader(
        'Content-Disposition',
        'attachment; filename={0}'.format('filename.json'))
    self.request.response.setHeader('Content-Type', 'application/json')
    return json.dumps(table_data, default=date_handler)
def contents(self):
    """Folder contents of the context, wrapped as a content listing."""
    folder_contents = self.context.getFolderContents()
    return IContentListing(folder_contents)
def _makequery(
    self,
    query=None,
    batch=False,
    b_start=0,
    b_size=30,
    sort_on=None,
    sort_order=None,
    limit=0,
    brains=False,
    custom_query=None,
):
    """Parse the (form)query and return using multi-adapter.

    Variant that can route the search through Solr when the parsed
    query carries a truthy ``searchWithSolr`` marker; otherwise the
    portal_catalog is used as usual.
    """
    # Give every registered IQueryModifier a chance to rewrite the raw
    # (form) query, in deterministic (name-sorted) order.
    query_modifiers = getUtilitiesFor(IQueryModifier)
    for name, modifier in sorted(query_modifiers, key=itemgetter(0)):
        query = modifier(query)

    parsedquery = queryparser.parseFormquery(
        self.context, query, sort_on, sort_order)

    # Per-index rewrites; a modifier may also rename its index.
    index_modifiers = getUtilitiesFor(IParsedQueryIndexModifier)
    for name, modifier in index_modifiers:
        if name in parsedquery:
            new_name, query = modifier(parsedquery[name])
            parsedquery[name] = query
            # if a new index name has been returned, we need to replace
            # the native ones
            if name != new_name:
                del parsedquery[name]
                parsedquery[new_name] = query

    # Check for valid indexes
    catalog = getToolByName(self.context, "portal_catalog")
    valid_indexes = [
        index for index in parsedquery
        if index in catalog.indexes()
    ]

    # We'll ignore any invalid index, but will return an empty set if none
    # of the indexes are valid.
    if not valid_indexes:
        logger.warning(
            "Using empty query because there are no valid indexes used.")
        parsedquery = {}

    empty_query = not parsedquery  # store emptiness

    if batch:
        parsedquery["b_start"] = b_start
        parsedquery["b_size"] = b_size
    elif limit:
        parsedquery["sort_limit"] = limit

    if "path" not in parsedquery:
        parsedquery["path"] = {"query": ""}

    if isinstance(custom_query, dict) and custom_query:
        # Update the parsed query with an extra query dictionary. This may
        # override the parsed query. The custom_query is a dictonary of
        # index names and their associated query values.
        parsedquery.update(custom_query)
        empty_query = False

    # filter bad term and operator in query
    parsedquery = self.filter_query(parsedquery)

    results = []
    # RER.SOLRPUSH PATCH
    # Decide whether to search via Solr; the marker key is removed from
    # the query either way so the catalog never sees it.
    search_with_solr = False
    if "searchWithSolr" in parsedquery:
        if parsedquery["searchWithSolr"]["query"]:
            search_with_solr = True
        del parsedquery["searchWithSolr"]
    if not empty_query:
        if search_with_solr:
            # Strip the trailing wildcard the catalog queryparser appends
            # to SearchableText -- Solr handles prefixing itself.
            if "SearchableText" in parsedquery:
                if isinstance(parsedquery["SearchableText"], dict):
                    parsedquery["SearchableText"]["query"] = parsedquery[
                        "SearchableText"]["query"].rstrip("*")
                else:
                    parsedquery["SearchableText"] = parsedquery[
                        "SearchableText"].rstrip("*")
            results = SolrResponse(data=solr_search(
                **self.clean_query_for_solr(query=parsedquery)))
        else:
            results = catalog(**parsedquery)
        # Clamp the reported count so batching honours ``limit``.
        if (getattr(results, "actual_result_count", False) and limit  # noqa
                and results.actual_result_count > limit  # noqa
                ):
            results.actual_result_count = limit

    # Solr responses are returned as-is (not adaptable to a listing).
    if not brains and not search_with_solr:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def get_content_listing(self, interfaces=None, **query):
    """Get ContentListing from brains gotten from get_brains method."""
    brains = self.get_brains(interfaces=interfaces, **query)
    return IContentListing(brains)
def available_components(self):
    """All published components, wrapped as a content listing."""
    catalog = api.portal.get_tool(name='portal_catalog')
    brains = catalog(
        object_provides=IComponent.__identifier__,
        review_state='published',
    )
    return IContentListing(brains)
def documents(self):
    """Documents of the context, wrapped as a content listing."""
    docs = self.context.get_documents()
    return IContentListing(docs)
def sections(self):
    """Contained TransparencySection brains as a content listing."""
    context = aq_inner(self.context)
    query = {"portal_type": "TransparencySection"}
    brains = context.getFolderContents(query)  # noqa
    return IContentListing(brains)
def documents(self):
    """Documents and mails, capped at ``document_limit`` items."""
    brains = self.catalog([
        'opengever.document.document',
        'ftw.mail.mail',
    ])
    return IContentListing(brains[:self.document_limit])
def test_listing_interface(self):
    # The adapted catalog results must fully provide IContentListing.
    listing = IContentListing(self.catalog())
    self.assertTrue(verifyObject(IContentListing, listing))
def incidents(self):
    """All incidents in folder order, as a content listing."""
    catalog = api.portal.get_tool(name='portal_catalog')
    brains = catalog(
        object_provides=IIncident.__identifier__,
        sort_on='getObjPositionInParent',
    )
    return IContentListing(brains)
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False,
               custom_query=None):
    """Parse the (form)query and return using multi-adapter.

    Variant that supports optional field collapsing: when a
    ``collapse_on`` value is found on the request or the context, only
    the first result per collapse value is kept (lazily).
    """
    # Let every registered IQueryModifier rewrite the raw (form) query,
    # in deterministic (name-sorted) order.
    query_modifiers = getUtilitiesFor(IQueryModifier)
    for name, modifier in sorted(query_modifiers, key=itemgetter(0)):
        query = modifier(query)

    parsedquery = queryparser.parseFormquery(
        self.context, query, sort_on, sort_order)

    # Per-index rewrites; a modifier may also rename its index.
    index_modifiers = getUtilitiesFor(IParsedQueryIndexModifier)
    for name, modifier in index_modifiers:
        if name in parsedquery:
            new_name, query = modifier(parsedquery[name])
            parsedquery[name] = query
            # if a new index name has been returned, we need to replace
            # the native ones
            if name != new_name:
                del parsedquery[name]
                parsedquery[new_name] = query

    # Check for valid indexes
    catalog = getToolByName(self.context, 'portal_catalog')
    valid_indexes = [
        index for index in parsedquery
        if index in catalog.indexes()
    ]

    # We'll ignore any invalid index, but will return an empty set if none
    # of the indexes are valid.
    if not valid_indexes:
        logger.warning(
            "Using empty query because there are no valid indexes used.")
        parsedquery = {}

    empty_query = not parsedquery  # store emptiness

    if batch:
        parsedquery['b_start'] = b_start
        parsedquery['b_size'] = b_size
    elif limit:
        parsedquery['sort_limit'] = limit

    if 'path' not in parsedquery:
        parsedquery['path'] = {'query': ''}

    if isinstance(custom_query, dict) and custom_query:
        # Update the parsed query with an extra query dictionary. This may
        # override the parsed query. The custom_query is a dictonary of
        # index names and their associated query values.
        parsedquery.update(custom_query)
        empty_query = False

    # filter bad term and operator in query
    parsedquery = self.filter_query(parsedquery)

    results = []
    if not empty_query:
        results = catalog(**parsedquery)
        # Clamp the reported count so batching honours ``limit``.
        if getattr(results, 'actual_result_count', False) and limit\
                and results.actual_result_count > limit:
            results.actual_result_count = limit

    # Field collapsing: the request value wins over the context attr.
    collapse_on = self.request.get(
        'collapse_on', getattr(self.context, 'collapse_on', None))
    if collapse_on is not None:
        fc = FieldCollapser(query={'collapse_on': collapse_on})
        # Wrap the lazy filter in an identity LazyMap so the original
        # length / actual_result_count survive for batching.
        results = LazyMap(lambda x: x,
                          LazyFilter(results, test=fc.collapse),
                          length=results._len,
                          actual_result_count=results.actual_result_count)

    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def test_simple_contentlisting(self):
    # Even an empty sequence adapts to a ContentListing.
    from plone.app.contentlisting.contentlisting import ContentListing
    empty = []
    wrapped = IContentListing(empty)
    self.assertTrue(isinstance(wrapped, ContentListing))
def get_content_listing(self, interfaces=None, **query):
    """Wrap the brains returned by ``get_brains`` in a content listing."""
    brains = self.get_brains(interfaces=interfaces, **query)
    return IContentListing(brains)
def test_making_contentlistingobjects(self):
    # Items of a listing built from brains must be catalog listing objects.
    from plone.app.contentlisting.catalog import \
        CatalogContentListingObject
    listing = IContentListing(self.catalog())
    self.assertTrue(isinstance(listing[0], CatalogContentListingObject))
def results(self, query=None, batch=True, b_size=100, b_start=0, old=False):
    """ Get properly wrapped search results from the catalog.
    Everything in Plone that performs searches should go through this view.
    'query' should be a dictionary of catalog parameters.

    Returns a (batched) content listing of permission-filtered results,
    ``{}``/``None``/``[]`` for the various "no results" states, or -- when
    ``old`` is true -- a batch of legacy JSON records instead.
    """
    if batch:
        query['b_start'] = b_start = int(b_start)
        query['b_size'] = b_size
    query = self.filter_query(query)
    # Sentinel path used by filter_query to signal "nothing to search".
    if query['path'] == '/empty_path/':
        return {}
    query['sort_order'] = 'reverse'
    newresults = []
    new_path = []
    root_path = '/'.join(api.portal.get().getPhysicalPath())  # /998/govern
    lt = getToolByName(self, 'portal_languages')
    lang = lt.getPreferredLanguage()
    # The only organ containers a plain search is allowed to target.
    query_paths = [
        root_path + '/' + lang + '/consell-de-govern/consell-de-govern/',
        root_path + '/' + lang + '/cs/ple-del-consell-social/',
        root_path + '/' + lang + '/claustre-universitari/claustre-universitari/']
    username = api.user.get_current().id
    if root_path + '/not_anon_my_organs/' in query['path']:
        # If the user is not anonymous and sent the "organs related to
        # me" checkbox, do a special search: collect the paths of all
        # organ objects on which the user holds one of the OG* roles.
        if not api.user.is_anonymous():
            results = []
            values = api.content.find(
                portal_type=['genweb.organs.organgovern'],
                path=root_path + '/' + lang)
            for obj in values:
                organ = obj.getObject()
                all_roles = api.user.get_roles(username=username, obj=organ)
                roles = [o for o in all_roles
                         if o in ['OG1-Secretari', 'OG2-Editor',
                                  'OG3-Membre', 'OG4-Afectat',
                                  'OG5-Convidat']]
                sessionpath = obj.getPath()
                if utils.checkhasRol(['OG1-Secretari', 'OG2-Editor',
                                      'OG3-Membre', 'OG4-Afectat',
                                      'OG5-Convidat'], roles):
                    # First match turns the path into a list; later
                    # matches append to it.
                    if type(query['path']) == str:
                        query['path'] = sessionpath.split()
                    else:
                        query['path'].append(sessionpath)
    elif type(query['path']) == str:
        # A single path must be one of the allowed organ containers.
        if query['path'] not in query_paths:
            return None
    else:
        for value in query['path']:
            if value not in query_paths:
                return None
    if query['latest_session']:
        # Replace each organ path with the path of its newest session.
        if isinstance(query['path'], list):
            for organ in query['path']:
                session_path = api.content.find(
                    path=organ,
                    portal_type='genweb.organs.sessio',
                    sort_on='created',
                    sort_order='reverse')
                if session_path:
                    new_path.append(session_path[0].getPath())
        if isinstance(query['path'], str):
            session_path = api.content.find(
                path=query['path'],
                portal_type='genweb.organs.sessio',
                sort_on='created',
                sort_order='reverse')
            if session_path:
                new_path.append(session_path[0].getPath())
        query['path'] = new_path
    # Make default view return 0 results
    if 'SearchableText' not in query:
        # On first load, with nothing selected, every element is
        # checked; the presence of 'Folder' marks that default state.
        if 'Folder' in query['portal_type']:
            return None
    if 'genweb.organs.punt' in query['portal_type']:
        query['portal_type'].append('genweb.organs.subpunt')
    if query is None:
        return None
    else:
        catalog = api.portal.get_tool(name='portal_catalog')
        try:
            # for all acords or punts
            results = catalog(**query)
            all_results = []
            for res in results:
                all_results.append(res)
            # for subjects
            aux_subject_res = catalog.searchResults(
                portal_type=query['portal_type'],
                Subject=query['SearchableText'].replace('*', ''))
            for res in aux_subject_res:
                if res not in all_results:
                    all_results.append(res)
            # for documents
            ptype = query['portal_type']
            # NOTE(review): ``query_docs = query`` aliases the dict, so
            # the portal_type overwrite below also mutates ``query`` --
            # confirm this is intentional (a copy may have been meant).
            query_docs = query
            query_docs['portal_type'] = "genweb.organs.document"
            aux_doc_res = catalog(**query_docs)
            for res in aux_doc_res:
                obj = res.getObject()
                parent = obj.getParentNode()
                # Matching documents contribute their parent brain once.
                if parent.portal_type in ptype:
                    if parent not in all_results:
                        p_brain = catalog.searchResults(
                            portal_type=ptype, id=parent.id)[0]
                        all_results.append(p_brain)
            # Permission-filter: keep only items the user may view.
            for res in all_results:
                item = res.getObject()
                if item.portal_type == "genweb.organs.document":
                    item = item.getParentNode()
                if item.portal_type == 'genweb.organs.punt':
                    if permissions.canViewPunt(self, item):
                        newresults.append(res)
                if item.portal_type == 'genweb.organs.subpunt':
                    if permissions.canViewSubpunt(self, item):
                        newresults.append(res)
                if item.portal_type == 'genweb.organs.acord':
                    if permissions.canViewAcord(self, item):
                        newresults.append(res)
        except ParseError:
            return []
    # Old documents
    if old:
        # Legacy records shipped as JSON files inside the egg.
        # NOTE(review): file handles are never closed -- consider
        # ``with open(...)`` here.
        genweborgansegg = pkg_resources.get_distribution('genweb.organs')
        docs_1315 = open('{}/genweb/organs/2013-2015.json'.format(
            genweborgansegg.location))
        docs_9613 = open('{}/genweb/organs/1996-2013.json'.format(
            genweborgansegg.location))
        data = json.loads(docs_1315.read())
        data2 = json.loads(docs_9613.read())
        old_results = []
        for d in data:
            if query['SearchableText'].replace('*', '') in d['title']:
                if isinstance(query['path'], str):
                    if str(d['unitat']).lower().replace(' ', '-') in query['path']:
                        old_results.append(d)
                else:
                    for path in query['path']:
                        if str(d['unitat']).lower().replace(' ', '-') in path:
                            old_results.append(d)
        for d in data2:
            if query['SearchableText'].replace('*', '') in str(d['text']):
                old_results.append(d)
        if batch:
            old_results = Batch(old_results, b_size, b_start)
        if 'created' not in query:
            return old_results
        else:
            return []
    results = IContentListing(newresults)
    if batch:
        results = Batch(results, b_size, b_start)
    return results
def __getslice__(self, i, j):
    """`x.__getslice__(i, j)` <==> `x[i:j]`

    Use of negative indices is not supported. Deprecated since
    Python 2.0 but still a part of `UserList`.
    """
    sliced = self._basesequence[i:j]
    return IContentListing(sliced)
def articles(self):
    """Returns content listing"""
    objs = self._objs()
    return IContentListing(objs)
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False,
               custom_query=None):
    """Parse the (form)query and return using multi-adapter.

    Filtered-batching wrapper around the base _makequery: because field
    collapsing/merging discards an unknown number of raw results per
    page, the unfiltered catalog window to fetch cannot be computed
    exactly. Hints carried in ``custom_query`` (``fc_ends``, ``fc_len``,
    ``fc_check``) from previous requests are used to guess it.
    """
    # Catalog assumes we want to limit the results to b_start+b_size. We
    # would like to limit the search too, however we don't know for sure
    # what to limit it to, since we need an unknown number of raw results
    # to fill up one filtered page. We use a combination of hints to guess:
    # - data about how the user's previously clicked pages filtered down
    # - the final length, if the user clicked on the last page
    # - a look-ahead param on the collection giving the max number of
    #   unfiltered results that make up one filtered page

    # Need to do this here as it removes these from the query before the
    # checksum is computed.
    fc_ends = self._get_hint_and_remove(custom_query, 'fc_ends', str, '')
    fc_len = self._get_hint_and_remove(custom_query, "fc_len", int)
    fc_check = self._get_hint_and_remove(custom_query, 'fc_check', str)
    # Checksum ties the hints to this exact query configuration.
    checksum = hashlib.md5(
        json.dumps((query, custom_query, sort_on, sort_order, b_size),
                   sort_keys=True)).hexdigest()
    if fc_check != checksum:
        # Query changed since the hints were produced; discard them.
        fc_ends = ''
        fc_len = None
    # fc_ends: per-page raw-result end offsets; keep only pages at or
    # before the requested one.
    fc_ends = enumerate([int(i) for i in fc_ends.split(',') if i])
    fc_ends = [(page, i) for page, i in fc_ends
               if page * b_size <= b_start + b_size]
    if not fc_ends:
        nearest_page, nearest_end = 0, 0
    else:
        nearest_page, nearest_end = max(fc_ends)
    max_unfiltered_pagesize = getattr(self.context,
                                      'max_unfiltered_page_size', 1000)
    # How many filtered pages past the nearest known boundary we need,
    # times the look-ahead, gives the raw window to fetch.
    additional_pages = int(floor(float(b_start) / b_size - nearest_page))
    safe_start = nearest_end
    safe_limit = additional_pages * max_unfiltered_pagesize
    # Always fetch raw brains unbatched; filtering/batching happen below.
    results = super(QueryBuilder, self)._makequery(
        query, batch=False, b_start=safe_start, b_size=safe_limit,
        sort_on=sort_on, sort_order=sort_order, limit=limit,
        brains=True, custom_query=custom_query)
    # Merge collapse settings from the context and the custom query.
    collapse_on = getattr(self.context, 'collapse_on', set())
    if custom_query is not None and 'collapse_on' in custom_query:
        custom_collapse_on = custom_query.get('collapse_on')
        if hasattr(custom_collapse_on, '__iter__'):
            collapse_on.update(custom_collapse_on)
        elif type(custom_collapse_on) in [str, unicode]:
            collapse_on.add(custom_collapse_on)
        del custom_query['collapse_on']
    merge_fields = getattr(self.context, 'merge_fields', None)
    if merge_fields is None and custom_query is not None:
        merge_fields = custom_query.get('merge_fields', set())
    elif merge_fields is None:
        merge_fields = set()
    if collapse_on:
        fc = FieldCollapser(collapse_on=collapse_on,
                            merge_fields=merge_fields)
        results = LazyFilterLen(results, test=fc.collapse, fc_len=fc_len)
    if not batch:
        # This is a bit of a hack: collectionfilter iterates the results
        # to work out all the facet values, and merging only completes
        # once iteration reaches the end. So when batch=False we iterate
        # up-front to ensure the merge (and hence len()) is correct --
        # even with no merge, to keep the reported length accurate.
        list(results)
    else:
        # Put this into the request so it ends up in the batch links.
        self.request.form['fc_ends'] = ','.join(
            [str(i) for i in results.fc_ends(b_start, b_size)])
        # we might have hit the end
        if getattr(results, 'fc_len', None) is not None:
            self.request.form['fc_len'] = results.fc_len
        # This ensures that if fc_len or fc_ends are used after the
        # query is updated, we don't reuse the stale hints.
        self.request.form['fc_check'] = checksum
    # NOTE(review): this repeats the batch=False iteration above --
    # appears redundant (same rationale comment was duplicated in the
    # original); harmless but worth confirming before removal.
    if not batch:
        list(results)
    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def solrResults(self, query, batch=True, b_size=20, b_start=0):
    """ Do the search with solr. Add to the query some solr parameters.

    Returns a dict with: 'tot_results_len', 'tabs' (facet-derived tab
    ids), 'results' (a possibly batched content listing) and, when
    facet counts are available, 'indexes_dict'.
    """
    solr_config = getUtility(ISolrConnectionConfig)
    # Force every field the Solr connection config marks as required.
    for field in solr_config.required:
        query[field] = True
    query['facet'] = 'true'
    # NOTE(review): list-returning .keys() with .append is Python 2
    # behaviour; under Python 3 dict views have no .append -- confirm
    # the target runtime.
    indexes_list = self.available_indexes.keys()
    indexes_list.append('portal_type')
    query['facet_field'] = indexes_list
    if batch:
        query['b_size'] = b_size
        query['b_start'] = b_start
    results = self.catalog(**query)
    res_dict = {'tabs': ['all']}
    # No result count at all means the search failed / returned nothing.
    if results.actual_result_count is None:
        res_dict['tot_results_len'] = 0
        return res_dict
    res_dict['tot_results_len'] = results.actual_result_count
    filtered_results = []
    global_facet_counts = getattr(results, 'facet_counts', None)
    if global_facet_counts:
        if hasattr(global_facet_counts, 'facet_fields'):
            # new c.solr with scorched lib
            facets = dict(
                (k, dict(v))
                for k, v in global_facet_counts.facet_fields.items())
        else:
            # old c.solr
            facets = global_facet_counts.get('facet_fields', {})
        res_dict['tabs'] = self.solrAvailableTabs(facets)
    # An explicitly selected tab wins; otherwise, when the first
    # configured tab is not "all", try tabs in order until one of them
    # yields results.
    active_tab = self.context.REQUEST.form.get('filter_tab')
    if active_tab:
        filtered_results = self.doFilteredSearch(active_tab, query)
    else:
        if self.tabs_order[0] != "all":
            for tab_id in self.tabs_order:
                filtered_results = self.doFilteredSearch(tab_id, query)
                if filtered_results:
                    break
    # Use tab-filtered results when available, else the global results;
    # either way wrap them as a content listing.
    if filtered_results:
        facet_counts = getattr(filtered_results, 'facet_counts', None)
        results = IContentListing(filtered_results)
    else:
        facet_counts = getattr(results, 'facet_counts', None)
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, b_start)
    res_dict['results'] = results
    if facet_counts:
        if hasattr(facet_counts, 'facet_fields'):
            # new c.solr with scorched lib
            facets = dict(
                (k, dict(v))
                for k, v in facet_counts.facet_fields.items())
        else:
            # old c.solr
            facets = facet_counts.get('facet_fields', {})
        res_dict['indexes_dict'] = self.solrFacetsFormatter(facets)
    return res_dict