def batch(self):
    """Return a batch of data rows for the order management table.

    Rows are sorted ascending by their sort key (stable), optionally
    reversed, then wrapped in a ``Batch`` for the current page.
    """
    # use only selected fields in the table
    columns = self.order_management_columns()
    # Collect (sort_key, data) pairs and sort once — replaces the
    # original O(n^2) manual insertion sort.  list.sort() is stable,
    # so ties keep registry order exactly as before.
    pairs = []
    for order_nr in self.registry.getOrders():
        data, sort = self._get_order_data(order_nr, columns)
        pairs.append((sort, data))
    pairs.sort(key=lambda pair: pair[0])
    orders = [data for _sort, data in pairs]
    if self.reverse:
        orders.reverse()
    pagesize = self.pagesize
    if NEW_BATCHING:
        b = Batch(orders, size=pagesize, start=self.pagenumber * pagesize)
    else:
        b = Batch(orders, pagesize=pagesize, pagenumber=self.pagenumber)
    return b
def view(self):
    """Render child todo items ordered by state and recency, batched."""
    session = DBSession()
    items = (
        session.query(TodoItem)
        .filter(TodoItem.parent_id == self.context.id)
        .order_by(TodoItem.todostate)
        .order_by(TodoItem.modification_date.desc())
        .all()
    )
    settings = todos_settings()
    page = self.request.params.get('page', 1)
    if settings['use_batching']:
        items = Batch.fromPagenumber(
            items,
            pagesize=settings['pagesize'],
            pagenumber=int(page),
        )
    return {
        'api': template_api(self.context, self.request),
        'macros': get_renderer('templates/macros.pt').implementation(),
        'items': items,
        'settings': settings,
    }
def view(self):
    """Render child software projects sorted by date, batched.

    Each project is refreshed from its external sources before display.
    """
    session = DBSession()
    query = session.query(SoftwareProject).filter(
        SoftwareProject.parent_id == self.context.id)
    items = query.all()
    # [TODO] Are these calls too expensive?
    # Plain loops for side effects — the original built throwaway
    # lists via comprehensions.  Per-source order is preserved.
    for item in items:
        item.refresh_pypi()
    for item in items:
        item.refresh_github()
    for item in items:
        item.refresh_bitbucket()
    items = sorted(items, key=lambda x: x.date,
                   reverse=not self.context.sort_order_is_ascending)
    page = self.request.params.get('page', 1)
    settings = collection_settings()
    if settings['use_batching']:
        items = Batch.fromPagenumber(items,
                                     pagesize=settings['pagesize'],
                                     pagenumber=int(page))
    return {
        'api': template_api(self.context, self.request),
        'macros': get_renderer('templates/macros.pt').implementation(),
        'items': items,
        'settings': settings,
    }
def get_subfolder_table(self):
    """Render the faceted table of all classification subfolders."""
    table_view = SubFoldersFacetedTableView(self.context, self.request)
    brains = api.content.find(
        context=self.context,
        portal_type="ClassificationSubfolder",
    )
    # batch size 9999 effectively disables paging
    return table_view.render_table(Batch(brains, 9999))
def search(self, query=None, page=None, b_size=None, uids=None):
    """Query the catalog and return results batched by page number."""
    catalog = getToolByName(self.context, 'portal_catalog')
    registry = getUtility(IRegistry)
    settings = registry.forInterface(ICoverSettings)
    searchable_types = settings.searchable_content_types
    # temporary: we'll only list published elements
    catalog_query = {
        'sort_on': 'effective',
        'sort_order': 'descending',
        'portal_type': searchable_types,
    }
    if query:
        # NOTE(review): a text query replaces (not extends) the base
        # query, dropping portal_type and sorting — confirm intended.
        catalog_query = {'SearchableText': '%s*' % query}
    # XXX: not implemented, this is needed?
    # if uids:
    #     catalog_query['UID'] = uids
    results = catalog(**catalog_query)
    return Batch.fromPagenumber(items=results,
                                pagesize=b_size,
                                pagenumber=page)
def extend_with_batching(widget, results):
    """Batch *results* by the request's page parameter.

    Returns a (batch, rendered minimal navigation) tuple.
    """
    raw_page = widget.request.get('page')
    page = int(raw_page) if raw_page else 1
    batch = Batch.fromPagenumber(results, pagenumber=page)
    batch_view = RefBrowserBatchView(widget, widget.request)
    return (batch, batch_view(batch, minimal_navigation=True))
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False,
               custom_query=None):
    """Run the base query unbatched, apply the request's custom UID
    ordering, then wrap/batch the results as the caller requested."""
    results = super(QueryBuilder, self)._makequery(
        query, batch=False, b_start=b_start, b_size=b_size,
        sort_on=sort_on, sort_order=sort_order, limit=limit,
        brains=True, custom_query=custom_query)
    raw_sorting = self.request.form.get('sorting', '')
    # a non-string (e.g. None) value means no custom sorting
    uid_order = (raw_sorting.split(',')
                 if isinstance(raw_sorting, basestring) else [])
    # re-order the result set by position in the sorting list;
    # unknown UIDs sink to the end
    rank = {uid: pos for pos, uid in enumerate(uid_order)}
    results = sorted(results, key=lambda item: rank.get(item.UID, 999))
    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def batch(self):
    """Return the current page of items, marking each via set_checked.

    ``show_all`` collapses everything onto a single page.
    """
    pagesize = self.pagesize
    if self.show_all:
        pagesize = len(self.items)
    b = Batch.fromPagenumber(self.items,
                             pagesize=pagesize,
                             pagenumber=self.pagenumber)
    # Explicit loop for the side effect: under Python 3 ``map`` is lazy
    # and the original map() call would never invoke set_checked.
    for item in b:
        self.set_checked(item)
    return b
def execute(self, query, secure=True, **kw):
    """Run a catalog search for *query* and return a batched result.

    ``batch_start``/``batch_step`` are popped from the query dict;
    ``secure`` selects the permission-checked search variant.  The
    optional ``sort`` keyword supports 'created', '-created' and
    'title'.
    """
    start = query.pop('batch_start', 0)
    step = query.pop('batch_step', 100)
    catalog = api.portal.get_tool(name='portal_catalog')
    if secure:
        search = catalog.searchResults
    else:
        search = catalog.unrestrictedSearchResults
    sort = kw.get('sort')
    if sort and isinstance(sort, basestring):
        # valid sort values:
        #   'created'  -> ascending by creation date
        #   '-created' -> descending by creation date
        #   'title'    -> ascending by title
        if sort == 'title':
            sort = 'sortable_title'
        if sort.startswith('-'):
            query['sort_order'] = 'descending'
            query['sort_on'] = sort[1:]
        else:
            query['sort_on'] = sort
    brains = search(query)
    return Batch(brains, step, start)
def batch(self, batch=True, bsize=0, b_start=0):
    """Return the table rows, optionally wrapped in a Batch.

    Falls back to request form values for ``b_start``/``bsize``; when
    batching is disabled (or the effective batch size is 0) the raw
    rows are returned instead of a Batch.
    """
    request = self.request
    # b_start: explicit argument wins, then the request form, then 0.
    self.b_start = b_start or request.form.get('b_start') or 0
    perform_search = 'searchInTable' in request.form.keys()
    # bsize: explicit argument, then context setting, then form, then 0.
    bsize = bsize or self.context.getBatchSize() or request.form.get(
        'bsize') or 0
    # NOTE: the ``batch`` parameter is re-bound to a boolean here.
    batch = batch and bsize > 0
    if not batch:
        self._rows = self.rows(search=perform_search)
        return self._rows
    self._rows = self.rows(batch=batch, bsize=bsize, b_start=self.b_start,
                           search=perform_search)
    # replicating foo elements to reach total size
    # (pad with None before/after so Batch sees the full sequence length)
    self._rows = [None] * self.b_start + self._rows + [None] * (
        self.result_length - self.b_start - bsize)
    return Batch(self._rows, bsize, start=self.b_start,
                 end=self.b_start + bsize, orphan=0, overlap=0,
                 pagerange=7)
def view(self):
    """Render the category listing with aggregated todo-state counts.

    For categories with children, the most recently modified child todo
    supplies the sort date and the display item; empty categories sort
    by their own modification date.
    """
    session = DBSession()
    query = session.query(Category).filter(
        Category.parent_id == self.context.id)
    items = query.all()
    # aggregate counts per todo state across all categories
    todos_data = {}
    for state in todo_states:
        todos_data[state] = 0
    todos_data['total'] = 0
    # tuples of (sort date, display item, done count, category)
    modification_dates_and_items = []
    for item in items:
        if item.children:
            done_count = 0
            for todo in item.children:
                todos_data[todo.todostate] += 1
                if todo.todostate == 'done':
                    done_count += 1
            todos_data['total'] += len(item.children)
            sorted_todoitems = sorted(item.children,
                                      key=lambda x: x.modification_date,
                                      reverse=True)
            # the newest child todo represents the category in the listing
            modification_dates_and_items.append(
                (sorted_todoitems[0].modification_date,
                 sorted_todoitems[0], done_count, item))
        else:
            modification_dates_and_items.append(
                (item.modification_date, item, 0, item))
    # tuples sort by date first (ascending)
    items = sorted(modification_dates_and_items)
    page = self.request.params.get('page', 1)
    settings = todos_settings()
    if settings['use_batching']:
        items = Batch.fromPagenumber(items,
                                     pagesize=settings['pagesize'],
                                     pagenumber=int(page))
    return {
        'api': template_api(self.context, self.request),
        'macros': get_renderer('templates/macros.pt').implementation(),
        'items': items,
        'todos_data': todos_data,
        'data_keys': todo_states + ['total'],
        'settings': settings,
    }
def datasets(self):
    """Return the organization's datasets, decorated and batched."""
    decorated = []
    for dataset in self.organization_data():
        decorated.append(self.decorate_dataset(dataset))
    size = self.request.get("b_size", 20)
    start = self.request.get("b_start", 0)
    return Batch(decorated, size, start)
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False):
    """Parse the (form)query and return using multi-adapter"""
    parsedquery = queryparser.parseFormquery(self.context, query,
                                             sort_on, sort_order)
    # let registered index modifiers rewrite individual index queries
    index_modifiers = getUtilitiesFor(IParsedQueryIndexModifier)
    for name, modifier in index_modifiers:
        if name in parsedquery:
            new_name, query = modifier(parsedquery[name])
            parsedquery[name] = query
            # if a new index name has been returned, we need to replace
            # the native ones
            if name != new_name:
                del parsedquery[name]
                parsedquery[new_name] = query
    # Check for valid indexes
    catalog = getToolByName(self.context, 'portal_catalog')
    valid_indexes = [
        index for index in parsedquery if index in catalog.indexes()
    ]
    # We'll ignore any invalid index, but will return an empty set if none
    # of the indexes are valid.
    if not valid_indexes:
        logger.warning(
            "Using empty query because there are no valid indexes used.")
        parsedquery = {}
    if not parsedquery:
        # empty query: return an empty result of the requested flavor
        if brains:
            return []
        else:
            return IContentListing([])
    if batch:
        parsedquery['b_start'] = b_start
        parsedquery['b_size'] = b_size
    elif limit:
        parsedquery['sort_limit'] = limit
    if 'path' not in parsedquery:
        parsedquery['path'] = {'query': ''}
    results = catalog(**parsedquery)
    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def batched_news(self):
    """Return published News Items, newest first, batched 6 per page."""
    news = api.content.find(
        portal_type="News Item",
        sort_on="effective",
        sort_order="descending",
    )
    pagenumber = int(self.request.get("p", "1"))
    return Batch.fromPagenumber(items=news, pagesize=6,
                                pagenumber=pagenumber, navlistsize=0)
def batch(self):
    """Return one page of product listing dicts for the grid view.

    Products inside the current page window get full pricing/image
    details; products outside it only carry batch bookkeeping data.
    """
    catalog = getToolByName(self.context, 'portal_catalog')
    portal_properties = getToolByName(self.context, 'portal_properties')
    use_view_action = portal_properties.site_properties.getProperty(
        'typesUseViewActionInListings', ())
    props = portal_properties.pcommerce_properties
    columns = int(props.getProperty('columns', 3))
    width = int(props.getProperty('thumb_width', 0))
    width = width and 'image/thumb?width=%s' % width or 'image_thumb'
    results = catalog(object_provides=IProduct.__identifier__,
                      path={
                          'query': '/'.join(self.context.getPhysicalPath()),
                          'depth': 1
                      },
                      sort_on='getObjPositionInParent')
    items = []
    pagesize = columns * 5
    start = (self.page - 1) * pagesize
    end = start + pagesize
    # enumerate() replaces the original hand-maintained counter; the
    # brain variable is no longer rebound to the result dict mid-loop.
    for i, brain in enumerate(results):
        url = brain.getURL()
        if brain.portal_type in use_view_action:
            url += '/view'
        if start <= i < end:
            # full entry: wake the object for pricing and image data
            obj = brain.getObject()  # renamed from 'object' (shadowed builtin)
            col = i % columns + 1
            adapter = IPricing(obj)
            image = None
            if obj.getImage():
                image = {
                    'caption': obj.getImageCaption(),
                    'thumb': '%s/%s' % (brain.getURL(), width)
                }
            item = {
                'uid': brain.UID,
                'class': 'col%s' % col,
                'title': brain.Title,
                'description': brain.Description,
                'price': CurrencyAware(adapter.getPrice()),
                'base_price': CurrencyAware(adapter.getBasePrice()),
                'offer': adapter.getPrice() < adapter.getBasePrice(),
                'image': image,
                'url': url
            }
        else:
            # lightweight entry outside the current page window
            item = {
                'uid': brain.UID,
                'title': brain.Title,
                'description': brain.Description,
                'url': url
            }
        items.append(item)
    return Batch(items, pagesize, self.page, 5)
def test_custom_batching_is_available(self):
    """The @@batchnavigation view renders the onegov batching bar."""
    # list(range(...)) instead of the redundant listcomp copy
    batch = Batch.fromPagenumber(list(range(1, 100)),
                                 pagesize=10,
                                 pagenumber=1)
    batching = self.portal.restrictedTraverse('@@batchnavigation')
    doc = PyQuery(batching(batch))
    self.assertTrue(doc('.onegovBatching.listingBar'),
                    'Did not found the onegov batching')
def datasets(self):
    """Return decorated datasets from the organization feed, batched.

    Falls back to an empty list when the feed payload does not support
    xpath (AttributeError).
    """
    data = self.organization_data()
    try:
        nodes = data.xpath("//dataset")
        decorated = [self.decorate_dataset(node) for node in nodes]
        size = self.request.get("b_size", 20)
        start = self.request.get("b_start", 0)
        return Batch(decorated, size, start)
    except AttributeError:
        return []
def execute(self, query, secure=True, **kw):
    """Search the catalog with *query* and return a batched result.

    ``batch_start``/``batch_step`` are popped from the query dict;
    ``secure`` chooses the permission-checked search variant.
    """
    start = query.pop('batch_start', 0)
    step = query.pop('batch_step', 100)
    catalog = api.portal.get_tool(name='portal_catalog')
    if secure:
        search = catalog.searchResults
    else:
        search = catalog.unrestrictedSearchResults
    return Batch(search(query), step, start)
def results(self, batch=True, b_start=0, b_size=None, sort_on=None,
            limit=None, brains=False, custom_query=None):
    """Return the collection results re-ordered by the stored sorting.

    Items missing from ``self.sorting`` sink to the end; batching is
    applied after the custom sort.
    """
    base = super(SortableCollection, self).results(
        batch, b_start, b_size, sort_on, limit, brains, custom_query)
    rank = {uuid: index for index, uuid in enumerate(self.sorting)}
    ordered = sorted(base, key=lambda item: rank.get(item.uuid(), 999))
    if batch:
        return Batch(ordered, b_size, start=b_start)
    return ordered
def test_ElementNumberColumn(self): """A base column using 'Title' metadata but rendered as a link to the element.""" # create some testingtype instances to build a batch for i in range(0, 8): api.content.create(container=self.eea_folder, type='testingtype', title='My testing type {0}'.format(i)) # create a batch with every elements brains = self.portal.portal_catalog(portal_type='testingtype') self.assertEquals(len(brains), 8) # without batch table = BrainsWithoutBatchTable(self.portal, self.portal.REQUEST) self.assertEquals(len(table.values), 8) column = ElementNumberColumn(self.portal, self.portal.REQUEST, table) self.assertEqual(column.renderCell(table.values[0]), 1) self.assertEqual(column.renderCell(table.values[1]), 2) self.assertEqual(column.renderCell(table.values[2]), 3) self.assertEqual(column.renderCell(table.values[3]), 4) self.assertEqual(column.renderCell(table.values[4]), 5) self.assertEqual(column.renderCell(table.values[5]), 6) self.assertEqual(column.renderCell(table.values[6]), 7) self.assertEqual(column.renderCell(table.values[7]), 8) # with batch table = self.faceted_z3ctable_view column = ElementNumberColumn(self.portal, self.portal.REQUEST, table) batch = Batch(brains, size=5) table.update(batch) self.assertEqual(batch.start, 1) self.assertEqual(column.renderCell(batch._sequence[0]), 1) self.assertEqual(column.renderCell(batch._sequence[1]), 2) self.assertEqual(column.renderCell(batch._sequence[2]), 3) self.assertEqual(column.renderCell(batch._sequence[3]), 4) self.assertEqual(column.renderCell(batch._sequence[4]), 5) # next 5 others (3 last actually) are accessible if batch start changed self.assertRaises(ValueError, column.renderCell, batch._sequence[5]) batch.start = 6 self.assertEqual(column.renderCell(batch._sequence[5]), 6) self.assertEqual(column.renderCell(batch._sequence[6]), 7) self.assertEqual(column.renderCell(batch._sequence[7]), 8)
def update(self):
    """Build the Apple News article batch and, on POST, bulk-update the
    selected articles, collecting a status message per failure."""
    uids = []
    request = self.request
    # NOTE(review): the form value may be a string — presumably Batch
    # copes with it; confirm b_start coercion upstream.
    b_start = request.form.get('b_start', 0)
    catalog = getToolByName(self.context, 'portal_catalog')
    self.batch = Batch(
        catalog(has_apple_news=True,
                sort_on='Date',
                sort_order='descending'),
        start=b_start,
        size=50
    )
    if request.method.lower() == 'post':
        messages = []
        # CSRF protection: reject unverified POSTs
        authenticator = getMultiAdapter(
            (self.context, request), name=u"authenticator"
        )
        if not authenticator.verify():
            raise Unauthorized
        uids = request.get('uids', [])
        if not uids:
            return
        count = 0
        brains = catalog(has_apple_news=True, UID=uids)
        for b in brains:
            obj = b.getObject()
            adapter = IAppleNewsActions(obj, alternate=None)
            if adapter is not None:
                try:
                    adapter.update_article()
                    count += 1
                except AppleNewsError as e:
                    log(u'Handled Apple News Error in bulk update '
                        u'{}: {}'.format(e, e.data))
                    # 409: conflicting remote changes in Publisher
                    if e.code == 409:
                        messages.append(
                            u'Unable to update article "{}" '.format(
                                safe_unicode(b.Title)
                            ) + u'because there are conflicting changes '
                            u'in Apple News Publisher'
                        )
                    else:
                        messages.append(
                            u'Unable to update article "{}" '.format(
                                safe_unicode(b.Title)
                            ) + u'check logs for details.'
                        )
        msg_adapter = IStatusMessage(self.request)
        msg_adapter.add(
            u'Updated {} Apple News article with {} errors'.format(
                count, len(brains) - count
            ),
            type=u"info"
        )
        for msg in messages:
            msg_adapter.add(msg, type=u'error')
def update(self):
    """Prepare the batched order listing for the selected date range."""
    start = int(self.request.get('b_start', 0))
    range_start = self.startDate()
    range_end = self.endDate()
    order_sequence = LazyFilteredOrders(storage.get_storage(),
                                        range_start, range_end,
                                        csv=False)
    if len(order_sequence) > 0:
        self.orders_exist = True
    self.batch = Batch(order_sequence, size=50, start=start)
    super(OrderControlPanelView, self).update()
def batch(self):
    """Batch of Products (brains)."""
    context = aq_inner(self.context)
    catalog = getToolByName(context, 'portal_catalog')
    brains = catalog.searchResults(
        path='/'.join(context.getPhysicalPath()),
        review_state='published',
        portal_type='Place',
    )
    return Batch.fromPagenumber(
        items=brains,
        pagesize=2000,
        pagenumber=self.pagenum,
        navlistsize=5,
    )
def __call__(self, query, batch=False, b_start=0, b_size=30,
             sort_on=None, sort_order=None, limit=0, brains=False,
             custom_query=None):
    """Run the base query; when an id/title ordering is configured,
    fetch the full result set, sort it, then batch afterwards."""
    by_id = self.order_by_id
    by_title = self.order_by_title
    want_batch = batch
    needs_sort = bool(by_id or by_title)
    if needs_sort:
        # fetch everything unbatched so the custom sort sees all rows
        batch = False
    # Get default results
    results = super(QueryBuilder, self).__call__(
        query, batch=batch, b_start=b_start, b_size=b_size,
        sort_on=sort_on, sort_order=sort_order, limit=limit,
        brains=brains, custom_query=custom_query
    )
    if needs_sort:
        if by_id:
            results = sorted(
                results,
                key=lambda x: self.id_order(by_id, x),
            )
        elif by_title:
            # precompiling regexes
            patterns = [re.compile(x) for x in by_title]
            results = sorted(
                results,
                key=lambda x: self.title_order(patterns, x),
            )
        if want_batch:
            return Batch(results, b_size, start=b_start)
    return results
def listRenderedContainedElements(self, portal_types=(),
                                  widgets_to_render=(), b_size=30,
                                  b_start=0):
    """ Get the contained elements, rendered for display.
        If p_portal_types is specified, only return elements having the
        required portal_type.
        If p_widgets_to_render is specified, only render given
        fields/widgets. """
    listing = IListContainedDexterityObjectsForDisplay(
        self.context).listContainedObjects(
            portal_types, widgets_to_render,
            b_start=b_start, b_size=b_size)
    return Batch(listing, b_size, b_start, orphan=1)
def listings(self, b_start=None, b_size=None):
    """get a page of listings

    For Collections the context's own results() drives the query;
    otherwise a catalog search for upcoming published Events is built,
    honouring the SearchableText / SearchAll / start / end request
    parameters.
    """
    if b_size is None:
        b_size = self.batch_size
    if b_start is None:
        # derive the offset from the 1-based page attribute if present
        b_start = (getattr(self, 'page', 1) - 1) * b_size
    content_filter = {}
    is_collection = self.context.portal_type == 'Collection'
    if not is_collection:
        content_filter = {
            'portal_type': 'Event',
            'sort_on': 'start',
            'sort_order': 'ascending',
            'review_state': 'published',
        }
    text = self.request.get('SearchableText')
    if text:
        content_filter['SearchableText'] = text
    search_all = self.request.get('SearchAll')
    if search_all == 'yes':
        # effectively no lower bound on the start date
        content_filter['start'] = {
            'query': DateTime('1900/01/01'),
            'range': 'min'
        }
    elif search_all == 'no' or not is_collection:
        # default: only events starting now or later
        content_filter['start'] = {'query': DateTime(), 'range': 'min'}
    start = self.request.get('start')
    if start:
        content_filter['start'] = start
    end = self.request.get('end')
    if end:
        content_filter['end'] = end
    if is_collection:
        batch = self.context.results(batch=True, b_start=b_start,
                                     b_size=b_size, brains=True,
                                     custom_query=content_filter)
    else:
        catalog = getToolByName(self.context, 'portal_catalog')
        items = catalog(**content_filter)
        batch = Batch(items, b_size, b_start)
    return batch
def search(self):
    """Dataset search: batch the catalog results and return a dict with
    batching info plus per-brain metadata for the client."""
    kw = self.request.form
    b_start = int(kw.pop('b_start', 0))
    # NOTE(review): if the form supplies b_size as a string, min() of
    # str and int raises on Python 3 — presumably Python 2 here; verify.
    b_size = int(min(kw.pop('b_size', 50), 50))
    # remove path and portal_type
    # TODO: go through catalog indices and restrict parameters to index
    # names
    kw.pop('path', None)
    kw.pop('portal_type', None)
    kw.pop('object_provides', None)
    kw.pop('used', None)
    kw.pop('_merge', None)
    pc = getToolByName(self.context, 'portal_catalog')
    # force the search to datasets below the current context
    kw.update({
        'object_provides': 'org.bccvl.site.content.interfaces.IDataset',
        # 'object_provides': IDataset.__identifier__,
        'path': '/'.join(self.context.getPhysicalPath()),
    })
    batch = Batch(pc.searchResults(**kw), b_size, b_start)
    result = {
        'total': batch.sequence_length,
        'length': batch.length,
        'b_start': b_start,
        'b_size': b_size,
        'results': []
    }
    # TODO: could add next/prev links to make batch nav easier
    for brain in batch:
        result['results'].append({
            'url': brain.getURL(),
            'uuid': brain.UID,
            'id': brain.getId,
            # 'BCCCategory': brain.BCCCategory,
            'BCCDataGenre': brain.BCCDataGenre,
            # 'BCCEnviroLayer': brain.BCCEnviroLayer,
            # 'BCCEmissionScenairo': brain.BCCEmissionScenairo,
            # 'BCCGlobalClimateModel': brain.BCCGlobalClimateModel,
            'BCCResolution': brain.BCCResolution,
            'Description': brain.Description,
            'Title': brain.Title,
            'job_state': brain.job_state,
        })
    return result
def view_blog(self):
    """Render blog entries visible to the current user, newest first,
    optionally batched."""
    settings = blog_settings()
    macros = get_renderer('templates/macros.pt').implementation()
    session = DBSession()
    entries = (session.query(BlogEntry)
               .filter(BlogEntry.parent_id == self.context.id)
               .order_by(BlogEntry.date.desc())
               .all())
    # keep only entries the current user may view
    items = [entry for entry in entries
             if has_permission('view', entry, self.request)]
    page = self.request.params.get('page', 1)
    if settings['use_batching']:
        items = Batch.fromPagenumber(items,
                                     pagesize=settings['pagesize'],
                                     pagenumber=int(page))
    return {
        'api': template_api(self.context, self.request),
        'macros': macros,
        'items': items,
        'settings': settings,
    }
def view(self):
    """Render topics ordered by latest activity, optionally batched.

    Each entry is a (date, display item, topic) tuple where the
    display item is the newest child post, or the topic itself when
    it has no children.
    """
    session = DBSession()
    topics = session.query(Topic).filter(
        Topic.parent_id == self.context.id).all()
    entries = []
    for topic in topics:
        if topic.children:
            # newest child post represents the topic
            newest = max(topic.children,
                         key=lambda x: x.modification_date)
            entries.append((newest.modification_date, newest, topic))
        else:
            entries.append((topic.modification_date, topic, topic))
    items = sorted(entries,
                   reverse=not self.context.sort_order_is_ascending)
    page = self.request.params.get('page', 1)
    settings = forum_settings()
    if settings['use_batching']:
        items = Batch.fromPagenumber(items,
                                     pagesize=settings['pagesize'],
                                     pagenumber=int(page))
    return {
        'api': template_api(self.context, self.request),
        'macros': get_renderer('templates/macros.pt').implementation(),
        'items': items,
        'settings': settings,
    }
def update(self):
    """Build the order batch, restricted to the selected date range.

    Also records first/last order dates and the index range of the
    filtered orders for later use (e.g. CSV export / template).
    """
    orders = list(_fetch_orders(storage.get_storage(), key=(), csv=False))
    # newest first
    orders.sort(key=lambda o: o.get('date_sort', ''), reverse=True)
    start = int(self.request.get('b_start', 0))
    if len(orders) > 0:
        self.orders_exist = True
        self.most_recent_order_date = orders[0]['date']
        self.first_order_date = orders[len(orders) - 1]['date']
        # default in case date selection integrity check fails
        # this could happen if end date < start date
        self.end_index = 0
        self.start_index = len(orders) - 1
        selected_start = self.startDate()
        selected_end = self.endDate()
        if self.check_date_integrity():
            filtered_orders = []
            index_list = []
            count = -1
            # orders are sorted descending, so iteration can stop at
            # the first order older than the selected start date
            for order in orders:
                count += 1
                if order['datetime'].date() > selected_end:
                    continue
                if order['datetime'].date() < selected_start:
                    break
                filtered_orders.append(order)
                index_list.append(count)
            if len(index_list) > 0:
                # it is possible no orders are found
                # even if the date range is correct
                self.end_index = min(index_list)
                self.start_index = max(index_list)
            orders = filtered_orders
    self.batch = Batch(orders, size=50, start=start)
    super(OrderControlPanelView, self).update()
def results(self, batch=True, b_start=0, b_size=None, sort_on=None,
            limit=None, brains=False, custom_query=None):
    """Return behavior-sorted results; when batching and no size is
    given, default the batch size to the full item count."""
    base = super(SortableCollectionBehavior, self).results(
        batch, b_start, b_size, sort_on, limit, brains, custom_query)
    # apply the custom sorting to the resultset according to
    # our sorting list
    rank = {uuid: index for index, uuid in enumerate(self.sorting)}
    ordered = sorted(base, key=lambda item: rank.get(item.uuid(), 999))
    if not batch:
        return ordered
    size = b_size if b_size else self.item_count
    return Batch(ordered, size, start=b_start)
def search(self, query=None, page=None, b_size=None, uids=None):
    """Catalog search returning a page-number batch of results."""
    catalog = getToolByName(self.context, "portal_catalog")
    registry = getUtility(IRegistry)
    settings = registry.forInterface(ICoverSettings)
    # temporary: we'll only list published elements
    catalog_query = {
        "sort_on": "effective",
        "sort_order": "descending",
        "portal_type": settings.searchable_content_types,
    }
    if query:
        # NOTE(review): a text query replaces (not extends) the base
        # query, dropping portal_type and sorting — confirm intended.
        catalog_query = {"SearchableText": "%s*" % query}
    # XXX: not implemented, this is needed?
    # if uids:
    #     catalog_query['UID'] = uids
    brains = catalog(**catalog_query)
    return Batch.fromPagenumber(items=brains, pagesize=b_size,
                                pagenumber=page)
def nearest(self):
    """Return a batch of neighbor brains for nearby Places.

    Each brain is augmented with 'distance' and 'center' pseudo-columns
    via a schema-extended brain class; the context itself is skipped.
    Returns an empty list when the context has no geo reference.
    """
    catalog = getToolByName(self.context, 'portal_catalog')

    class NeighborBrain(AbstractCatalogBrain, NoBrainer):
        pass

    # extend the catalog record schema with the extra computed columns
    cschema = catalog._catalog.schema
    scopy = cschema.copy()
    scopy['data_record_id_'] = len(cschema.keys())
    scopy['data_record_score_'] = len(cschema.keys()) + 1
    scopy['data_record_normalized_score_'] = len(cschema.keys()) + 2
    scopy['distance'] = len(cschema.keys()) + 3
    scopy['center'] = len(cschema.keys()) + 4
    NeighborBrain.__record_schema__ = scopy
    try:
        g = IGeoreferenced(self.context)
    except Exception:
        # narrowed from a bare ``except:`` (which also swallowed
        # KeyboardInterrupt/SystemExit); a context without geo info
        # simply has no neighbors
        return []

    def gen():
        for brain in catalog(
            geolocation={'query': (g.bounds, 10), 'range': 'nearest'},
            portal_type={'query': ['Place']},
            sort_index='geolocation',
        ):
            if brain.getId == self.context.getId():
                # skip self
                continue
            neighbor = NeighborBrain().__of__(catalog)
            for k in brain.__record_schema__.keys():
                neighbor[k] = brain[k]
            neighbor['distance'] = self.distance(brain)
            neighbor['center'] = self.center(brain)
            yield neighbor

    b_size = 20
    b_start = self.request.get('b_start', 0)
    return Batch(list(gen()), b_size, int(b_start), orphan=0)
def view_blog(context, request):
    """Render blog entries (oldest first) with pre-formatted dates."""
    settings = blog_settings()
    macros = get_renderer('templates/macros.pt').implementation()
    session = DBSession()
    items = (session.query(BlogEntry)
             .filter(BlogEntry.parent_id == context.id)
             .order_by(BlogEntry.date)
             .all())
    page = request.params.get('page', 1)
    if settings['use_batching']:
        items = Batch.fromPagenumber(items,
                                     pagesize=settings['pagesize'],
                                     pagenumber=int(page))
    for item in items:
        item.formatted_date = format_date(item.date)
    return {
        'api': template_api(context, request),
        'macros': macros,
        'items': items,
        'settings': settings,
    }
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False,
               custom_query=None):
    """Parse the (form)query and return using multi-adapter"""
    # The catalog assumes we want to limit the results to b_start+b_size.
    # We would like to limit the search too, however we don't know for
    # sure what to limit it to, since we need an unknown number of
    # unfiltered rows to fill up a filtered page.  We use a combination
    # of hints to guess:
    # - data about how many filtered pages the user clicked through before
    # - the final length, if the user clicked on the last page
    # - a look-ahead param on the collection representing the max number
    #   of unfiltered results that make up a filtered page
    # if b_start >=10:
    #     import pdb; pdb.set_trace()
    # Need to do this here as it removes these from the query before the
    # checksum is performed
    fc_ends = self._get_hint_and_remove(custom_query, 'fc_ends', str, '')
    fc_len = self._get_hint_and_remove(custom_query, "fc_len", int)
    fc_check = self._get_hint_and_remove(custom_query, 'fc_check', str)
    # checksum ties the cached hints to this exact query configuration
    checksum = hashlib.md5(
        json.dumps((query, custom_query, sort_on, sort_order, b_size),
                   sort_keys=True)).hexdigest()
    if fc_check != checksum:
        # query changed since the hints were recorded: discard them
        fc_ends = ''
        fc_len = None
    fc_ends = enumerate([int(i) for i in fc_ends.split(',') if i])
    fc_ends = [(page, i) for page, i in fc_ends
               if page * b_size <= b_start + b_size]
    if not fc_ends:
        nearest_page, nearest_end = 0, 0
    else:
        nearest_page, nearest_end = max(fc_ends)
    max_unfiltered_pagesize = getattr(self.context,
                                      'max_unfiltered_page_size', 1000)
    additional_pages = int(floor(float(b_start) / b_size - nearest_page))
    safe_start = nearest_end
    safe_limit = additional_pages * max_unfiltered_pagesize
    results = super(QueryBuilder, self)._makequery(
        query, batch=False, b_start=safe_start, b_size=safe_limit,
        sort_on=sort_on, sort_order=sort_order, limit=limit,
        brains=True, custom_query=custom_query)
    # collect collapse fields from the context and the custom query
    collapse_on = getattr(self.context, 'collapse_on', set())
    if custom_query is not None and 'collapse_on' in custom_query:
        custom_collapse_on = custom_query.get('collapse_on')
        if hasattr(custom_collapse_on, '__iter__'):
            collapse_on.update(custom_collapse_on)
        elif type(custom_collapse_on) in [str, unicode]:
            collapse_on.add(custom_collapse_on)
        del custom_query['collapse_on']
    merge_fields = getattr(self.context, 'merge_fields', None)
    if merge_fields is None and custom_query is not None:
        merge_fields = custom_query.get('merge_fields', set())
    elif merge_fields is None:
        merge_fields = set()
    if collapse_on:
        fc = FieldCollapser(collapse_on=collapse_on,
                            merge_fields=merge_fields)
        results = LazyFilterLen(results, test=fc.collapse, fc_len=fc_len)
    if not batch:
        # This is a bit of a hack: collectionfilter iterates the results
        # to work out all the values, and with merging the merge doesn't
        # really work until you reach the end.  So when batch=False we
        # iterate first to ensure the merge (and the result length) is
        # accurate, even when there is no merge.
        list(results)
    else:
        # Put this into the request so it ends up in the batch links
        self.request.form['fc_ends'] = ','.join(
            [str(i) for i in results.fc_ends(b_start, b_size)])
        # we might have hit the end
        if getattr(results, 'fc_len', None) is not None:
            self.request.form['fc_len'] = results.fc_len
        # This ensures that if fc_len or fc_ends are used after the query
        # is updated, we don't reuse stale hints
        self.request.form['fc_check'] = checksum
    # NOTE(review): this repeats the batch=False iteration above and
    # looks redundant — confirm before removing.
    if not batch:
        list(results)
    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def batch(self, observations, b_size, b_start, orphan, b_start_str):
    """Wrap *observations* in a Batch and attach template bookkeeping
    attributes (batchformkeys, b_start_str)."""
    # NOTE(review): the ``orphan`` parameter is ignored — orphan=1 is
    # hard-coded below; confirm whether callers expect it to be used.
    observationsBatch = Batch(observations, int(b_size), int(b_start),
                              orphan=1)
    observationsBatch.batchformkeys = []
    observationsBatch.b_start_str = b_start_str
    return observationsBatch
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False,
               custom_query=None):
    """Parse the (form)query and return using multi-adapter"""
    # let registered query modifiers rewrite the raw query first,
    # in deterministic (name-sorted) order
    query_modifiers = getUtilitiesFor(IQueryModifier)
    for name, modifier in sorted(query_modifiers, key=itemgetter(0)):
        query = modifier(query)
    parsedquery = queryparser.parseFormquery(self.context, query,
                                             sort_on, sort_order)
    index_modifiers = getUtilitiesFor(IParsedQueryIndexModifier)
    for name, modifier in index_modifiers:
        if name in parsedquery:
            new_name, query = modifier(parsedquery[name])
            parsedquery[name] = query
            # if a new index name has been returned, we need to replace
            # the native ones
            if name != new_name:
                del parsedquery[name]
                parsedquery[new_name] = query
    # Check for valid indexes
    catalog = getToolByName(self.context, 'portal_catalog')
    valid_indexes = [
        index for index in parsedquery if index in catalog.indexes()
    ]
    # We'll ignore any invalid index, but will return an empty set if none
    # of the indexes are valid.
    if not valid_indexes:
        logger.warning(
            "Using empty query because there are no valid indexes used.")
        parsedquery = {}
    empty_query = not parsedquery  # store emptiness
    if batch:
        parsedquery['b_start'] = b_start
        parsedquery['b_size'] = b_size
    elif limit:
        parsedquery['sort_limit'] = limit
    if 'path' not in parsedquery:
        parsedquery['path'] = {'query': ''}
    if isinstance(custom_query, dict) and custom_query:
        # Update the parsed query with an extra query dictionary. This may
        # override the parsed query. The custom_query is a dictonary of
        # index names and their associated query values.
        parsedquery.update(custom_query)
        empty_query = False
    # filter bad term and operator in query
    parsedquery = self.filter_query(parsedquery)
    results = []
    if not empty_query:
        results = catalog(**parsedquery)
        # cap the reported count at the requested limit
        if getattr(results, 'actual_result_count', False) and limit\
                and results.actual_result_count > limit:
            results.actual_result_count = limit
        # optionally collapse rows on a field, preserving lazy counts
        collapse_on = self.request.get(
            'collapse_on', getattr(self.context, 'collapse_on', None))
        if collapse_on is not None:
            fc = FieldCollapser(query={'collapse_on': collapse_on})
            results = LazyMap(lambda x: x,
                              LazyFilter(results, test=fc.collapse),
                              length=results._len,
                              actual_result_count=results.actual_result_count)
    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def _makequery(self, query=None, batch=False, b_start=0, b_size=30,
               sort_on=None, sort_order=None, limit=0, brains=False,
               custom_query=None):
    """Parse the (form)query and return using multi-adapter.

    :param query: formquery (list of criteria dicts) to be parsed.
    :param batch: when True, add batching hints to the catalog query and
        wrap the final results in a ``Batch``.
    :param b_start: batch start offset.
    :param b_size: batch page size.
    :param sort_on: index name to sort on.
    :param sort_order: sort direction passed to the query parser.
    :param limit: cap on the number of results (0 means unlimited).
    :param brains: when True, return raw catalog results instead of
        wrapping them in an ``IContentListing``.
    :param custom_query: optional dict of {index name: query value}
        merged over the parsed query (may override it).  The default is
        ``None`` rather than ``{}``: a mutable default dict is shared
        between all calls, so any mutation would leak across requests.
        Skipping the merge for ``None`` is equivalent to merging ``{}``.
    """
    parsedquery = queryparser.parseFormquery(
        self.context, query, sort_on, sort_order)

    # Per-index post-processing: a modifier may transform the value for
    # its index, and may even rename the index itself.
    index_modifiers = getUtilitiesFor(IParsedQueryIndexModifier)
    for name, modifier in index_modifiers:
        if name in parsedquery:
            new_name, query = modifier(parsedquery[name])
            parsedquery[name] = query
            # if a new index name has been returned, we need to replace
            # the native ones
            if name != new_name:
                del parsedquery[name]
                parsedquery[new_name] = query

    # Check for valid indexes
    catalog = getToolByName(self.context, 'portal_catalog')
    valid_indexes = [
        index for index in parsedquery if index in catalog.indexes()
    ]

    # We'll ignore any invalid index, but will return an empty set if none
    # of the indexes are valid.
    if not valid_indexes:
        logger.warning(
            "Using empty query because there are no valid indexes used.")
        parsedquery = {}

    # An empty query returns an empty result of the requested flavor
    # instead of querying the whole catalog.
    if not parsedquery:
        if brains:
            return []
        else:
            return IContentListing([])

    if batch:
        parsedquery['b_start'] = b_start
        parsedquery['b_size'] = b_size
    elif limit:
        parsedquery['sort_limit'] = limit

    if 'path' not in parsedquery:
        parsedquery['path'] = {'query': ''}

    if isinstance(custom_query, dict):
        # Update the parsed query with an extra query dictionary. This may
        # override the parsed query. The custom_query is a dictionary of
        # index names and their associated query values.
        parsedquery.update(custom_query)

    results = catalog(**parsedquery)
    # Clamp the reported total so batch UIs do not paginate past the
    # requested limit.
    if getattr(results, 'actual_result_count', False) and limit\
            and results.actual_result_count > limit:
        results.actual_result_count = limit

    if not brains:
        results = IContentListing(results)
    if batch:
        results = Batch(results, b_size, start=b_start)
    return results
def batch(self):
    """Return the requested page of context items as a ``Batch``.

    The page number is read from the prefixed ``...page`` request
    parameter (zero-based); ``Batch.fromPagenumber`` pages are one-based,
    hence the ``page + 1``.
    """
    items = self.context.get_items()
    # When no batch size is configured, use sys.maxsize so a single page
    # effectively holds everything.  sys.maxint was Python 2-only (removed
    # in Python 3); sys.maxsize exists on both and is what the identical
    # sibling implementation in this file already uses.
    batch_size = self.context.batch_size or sys.maxsize
    page = int(self.request.get('%spage' % self.prefix, 0))
    return Batch.fromPagenumber(items, batch_size, page + 1)
def batch(self):
    """Return the current page of ``self.notices`` as a ``Batch``.

    The zero-based page number comes from the ``page`` request parameter;
    each page holds ten notices.
    """
    per_page = 10
    current_page = int(self.request.get('page', '0'))
    offset = current_page * per_page
    return Batch(self.notices, start=offset, size=per_page)
def results(self):
    """Return all items matching ``self.tags`` in a single batch.

    The oversized page (99999) effectively disables pagination.
    """
    matched = self.adapted.get_items(self.tags)
    return Batch(matched, 99999, start=0)
def view(self):
    """Render the forum: top-level post trees plus (optionally) votes.

    Returns a template dict with the batched/unbatched post items, vote
    objects, aggregated vote statistics and forum settings.

    Bug fix: ``post_items`` was previously assigned only inside the
    ``use_batching`` branch, so disabling batching raised ``NameError``
    on the return statement.  It now defaults to the full list.
    """
    session = DBSession()

    # Posts, if we have them.
    order_by = Post.modification_date
    if not self.context.sort_order_is_ascending:
        order_by = Post.modification_date.desc()
    query = (session.query(Post)
             .filter(Post.parent_id == self.context.id)
             .order_by(order_by)
             )
    top_level_posts = query.all()

    # Build (reply count, post, tree) triples; a post without children
    # counts as a single-node tree.
    post_counts_and_trees = []
    for post in top_level_posts:
        if post.children:
            tree = nodes_tree(self.request, post)
            post_count = len(tree.tolist())
        else:
            tree = post
            post_count = 1
        post_tree = {
            'tree': {
                'children': [tree],
            },
        }
        post_counts_and_trees.append((post_count, post, post_tree))

    # Votes, if we have them.
    votes = None
    vote_data = {}
    vote_data['Sum'] = 0
    vote_data['Count'] = 0
    vote_data['Plus'] = 0
    vote_data['Zero'] = 0
    vote_data['Minus'] = 0
    votes_and_vote_objs = []
    if self.context.votable:
        query = session.query(Vote).filter(
            Vote.parent_id == self.context.id)
        votes = query.all()
        for vote in votes:
            vote_data['Sum'] += vote.vote
            vote_data['Count'] += 1
            if vote.vote > 0:
                vote_data['Plus'] += 1
            elif vote.vote == 0:
                vote_data['Zero'] += 1
            else:
                vote_data['Minus'] += 1
            # Tuples sort by their first element (the vote value).
            votes_and_vote_objs.append((vote.vote, vote, 'vote'))
        if votes_and_vote_objs:
            if self.context.sort_order_is_ascending:
                votes_and_vote_objs = sorted(votes_and_vote_objs)
            else:
                votes_and_vote_objs = sorted(votes_and_vote_objs,
                                             reverse=True)

    page = self.request.params.get('page', 1)
    settings = forum_settings()
    # Default to the unbatched list so post_items is always defined.
    post_items = post_counts_and_trees
    if settings['use_batching']:
        post_items = Batch.fromPagenumber(post_counts_and_trees,
                                          pagesize=settings['pagesize'],
                                          pagenumber=int(page))

    return {
        'api': template_api(self.context, self.request),
        'macros': get_renderer('templates/macros.pt').implementation(),
        'vote_items': votes_and_vote_objs,
        'vote_data': vote_data,
        'items': post_items,
        'settings': settings,
    }
def batch(self):
    """Return the requested page of context items as a ``Batch``.

    The zero-based page index is read from the prefixed ``...page``
    request parameter; ``Batch.fromPagenumber`` pages are one-based.
    """
    all_items = self.context.get_items()
    # No configured batch size -> one page big enough for everything.
    per_page = self.context.batch_size or sys.maxsize
    page_param = '%spage' % self.prefix
    zero_based_page = int(self.request.get(page_param, 0))
    return Batch.fromPagenumber(all_items, per_page, zero_based_page + 1)
def articles(self, limit, genre='Current', all_articles=False,
             outstanding_optional=False, section='', batched=False,
             b_start=0):
    """Return published articles split into outstanding/secondary/articles.

    :param limit: page size (number of articles per page / listing).
    :param genre: genre index value to filter on.
    :param all_articles: when True, fill only ``elements['articles']``;
        otherwise split into ``outstanding`` and ``secondary``.
    :param outstanding_optional: also split even without a section.
    :param section: section name; resolved through ``self.section()``.
    :param batched: when True, wrap listings in ``Batch.fromPagenumber``.
    :param b_start: one-based page number used when ``batched``.
    :returns: dict with keys ``outstanding``, ``secondary``, ``articles``.
    """
    elements = {'outstanding': [], 'secondary': [], 'articles': []}
    catalog = getToolByName(self.context, 'portal_catalog')
    section = self.section(section)

    # query build getting all the section news
    query = {}
    query['object_provides'] = {'query': [INITF.__identifier__]}
    query['sort_on'] = 'effective'
    query['sort_order'] = 'reverse'
    query['genre'] = genre
    query['review_state'] = 'published'

    # query build for outstanding news (only the single most recent one)
    outstanding = {}
    outstanding['object_provides'] = {
        'query': [ISectionArticle.__identifier__]}
    outstanding['genre'] = genre
    outstanding['sort_on'] = 'effective'
    outstanding['sort_order'] = 'reverse'
    outstanding['sort_limit'] = 1
    outstanding['review_state'] = 'published'

    if section:
        query['section'] = section
        outstanding['section'] = section

    existing = catalog.searchResults(query)

    if all_articles:
        if batched:
            # Only build a batch when the requested page actually exists.
            if(len(existing) > (b_start-1)*limit):
                elements['articles'] = Batch.fromPagenumber(items=existing,
                                                            pagesize=limit,
                                                            pagenumber=b_start)
            else:
                elements['articles'] = []
        else:
            elements['articles'] = existing[:limit]
    else:
        # we want to split the objects into outstanding and the rest
        if (existing and section) or (existing and outstanding_optional):
            outstanding_UID = 0
            outstanding_results = catalog.searchResults(outstanding)
            # 'move' skips the first regular result when it was promoted
            # to outstanding (to avoid showing it twice).
            move = 0
            if outstanding_results:
                elements['outstanding'].append(
                    outstanding_results[0].getObject())
                outstanding_UID = outstanding_results[0].UID
            else:
                elements['outstanding'].append(existing[0].getObject())
                move = 1
            # batch the objects for pagination
            if batched:
                # Past the first page the promoted item is no longer in
                # range, so no offset is needed.
                if b_start > limit:
                    move = 0
                # NOTE(review): relies on Python 2 filter() returning a
                # list — slicing fails on Python 3's iterator; confirm
                # the target runtime.
                tmp_elements = filter(lambda nota: nota.UID !=
                                      outstanding_UID, existing)[move:]
                if(len(existing) > (b_start-1)*limit):
                    elements['secondary'] = Batch.fromPagenumber(items=tmp_elements,
                                                                 pagesize=limit,
                                                                 pagenumber=b_start)
                else:
                    elements['secondary'] = []
            else:
                elements['secondary'] = \
                    filter(lambda nota: nota.UID !=
                           outstanding_UID, existing)[move: limit + move]
        elif existing:
            # not a section, but a global view
            elements['outstanding'] = [existing[0].getObject()]
            elements['secondary'] = existing[1: limit + 1]
    return elements