def search_people(context, request, from_, to, sort_col, sort_dir,
                  filterLetter='',
                  #_raw_get_container_batch=None # XXX funnel data from ux1
                  ):
    """Run a people-report search and return the slickgrid 'loadData' payload.

    ``from_``/``to`` are forwarded as batch_start/batch_size to the catalog
    batcher; ``sort_dir == -1`` selects descending order.  An unparseable
    text query yields an empty result set instead of an error page.
    """
    report_columns = [COLUMNS[col_id] for col_id in context.columns]
    jsdata = get_column_jsdata(report_columns, GRID_WIDTH - SCROLLBAR_WIDTH)
    criteria = get_report_query(context, request, letter=filterLetter)
    descending = (sort_dir == -1)
    try:
        batch = get_catalog_batch_grid(
            context, request,
            batch_start=from_,
            batch_size=to,
            sort_index=COLUMNS[sort_col].sort_index,
            reverse=descending,
            **criteria)
    except ParseError:
        # user entered something weird in the text search box.
        # show no results.
        batch = {'entries': [], 'total': 0}
    grid_info = _slickgrid_info_from_ux2_batch(
        context, request, batch, report_columns, jsdata,
        sort_col, sort_dir, filterLetter)
    return grid_info['widget_options']['loadData']
def get_batch(context, request):
    """Return a batch of results and term sequence for a search request.

    If the user provided no terms, the returned batch will be None and
    the term sequence will be empty.
    """
    kind = request.params.get("kind")
    if kind:
        # LiveSearch
        text_term = request.params.get("body")
        if not text_term:
            return None, ()
        searcher = queryUtility(IGroupSearchFactory, kind)
        if searcher is None:
            # If the 'kind' we got is not known, return an error
            raise HTTPBadRequest(
                "The LiveSearch group %s is not known" % kind)
        return searcher(context, request, text_term).get_batch(), \
               [text_term, kind]
    # Search form
    query, terms = make_query(context, request)
    if not terms:
        return None, terms
    context_path = model_path(context)
    if context_path and context_path != "/":
        query["path"] = {"query": context_path}
    query["allowed"] = {
        "query": effective_principals(request),
        "operator": "or",
        }
    return get_catalog_batch_grid(context, request, **query), terms
def _show_communities_view_helper(context, request, prefix='', **kw):
    """Assemble and run the catalog query behind the communities listing.

    Extra keyword arguments are merged straight into the catalog query.

    NOTE(review): this chunk appears truncated -- ``prefix``,
    ``qualifiers`` and ``error`` are assigned but the remainder of the
    function (presumably building the response) is not visible here.
    """
    # Grab the data for the two listings, main communities and portlet
    communities_path = resource_path(context)
    query = dict(
        sort_index='title',
        interfaces=[ICommunity],
        path={'query': communities_path, 'depth': 1},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        **kw)
    qualifiers = []
    titlestartswith = request.params.get('titlestartswith')
    if titlestartswith:
        # (min, max) pair -- presumably a range query on the title-prefix
        # index; verify against the index implementation.
        query['titlestartswith'] = (titlestartswith, titlestartswith)
        qualifiers.append("Communities that begin with '%s'" % titlestartswith)
    body = request.params.get('body')
    if body:
        query['texts'] = body
        qualifiers.append('Search for "%s"' % body)
    error = None
    try:
        batch_info = get_catalog_batch_grid(context, request, **query)
    except ParseError, e:
        # Unparseable user text query: fall back to an empty listing and
        # surface the parse error as a message instead of a traceback.
        batch_info = {'entries': [], 'batching_required': False}
        error = 'Error: %s' % e
def search_people(context, request, from_, to, sort_col, sort_dir,
                  filterLetter='',
                  #_raw_get_container_batch=None # XXX funnel data from ux1
                  ):
    """Search the people report and return slickgrid row data.

    ``sort_dir == -1`` means descending.  A text-search parse failure is
    reported as an empty batch rather than propagated.
    """
    cols = [COLUMNS[cid] for cid in context.columns]
    cols_jsdata = get_column_jsdata(cols, GRID_WIDTH - SCROLLBAR_WIDTH)
    sorter = COLUMNS[sort_col].sort_index
    criteria = get_report_query(context, request, letter=filterLetter)
    try:
        batch = get_catalog_batch_grid(
            context, request,
            batch_start=from_,
            batch_size=to,
            sort_index=sorter,
            reverse=(sort_dir == -1),
            **criteria)
    except ParseError:
        # user entered something weird in the text search box.
        # show no results.
        batch = {'entries': [], 'total': 0}
    info = _slickgrid_info_from_ux2_batch(
        context, request, batch, cols, cols_jsdata,
        sort_col, sort_dir, filterLetter)
    return info['widget_options']['loadData']
def _show_communities_view_helper(context, request, prefix='', **kw):
    """Assemble and run the catalog query behind the communities listing
    (internationalized variant -- qualifiers and errors go through ``_``).

    NOTE(review): this chunk appears truncated -- ``prefix``,
    ``qualifiers`` and ``error`` are assigned but the rest of the
    function is not visible here.
    """
    # Grab the data for the two listings, main communities and portlet
    communities_path = resource_path(context)
    query = dict(
        sort_index='title',
        interfaces=[ICommunity],
        path={'query': communities_path, 'depth': 1},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        **kw
        )
    qualifiers = []
    titlestartswith = request.params.get('titlestartswith')
    if titlestartswith:
        # (min, max) pair -- presumably a range query on the title-prefix
        # index; verify against the index implementation.
        query['titlestartswith'] = (titlestartswith, titlestartswith)
        qualifiers.append(
            _(u"Communities that begin with '${titlestartswith}'",
              mapping={'titlestartswith': titlestartswith}))
    body = request.params.get('body')
    if body:
        query['texts'] = body
        qualifiers.append('Search for "%s"' % body)
    error = None
    try:
        batch_info = get_catalog_batch_grid(context, request, **query)
    except ParseError, e:
        # Unparseable user text query: show an empty listing and carry
        # the parse error as a translatable message.
        batch_info = {'entries': [], 'batching_required': False}
        error = _(u'Error: ${message}', mapping={'message': e})
def get_recent_items_batch(community, request, size=10):
    """Return the community's most recently modified content as a batch.

    Results are restricted to content under the community's path that the
    requesting user's principals are allowed to see, newest first.
    """
    query = dict(
        interfaces=[ICommunityContent],
        sort_index="modified_date",
        reverse=True,
        batch_size=size,
        path={'query': resource_path(community)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        )
    return get_catalog_batch_grid(community, request, **query)
def get_catalog_events(context, request, searchterm=None, year=None,
                       month=None, past_events=None):
    """Query the catalog for calendar events under ``context``.

    Three filtering modes, chosen by the arguments:
    - searchterm given, no year/month: text search across all dates;
    - year and/or month given: events overlapping that month (or year);
    - neither: all past or all future events, per ``past_events``.
    """
    # Build up a query
    query = dict(
        path={'query': resource_path(context)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        interfaces=[ICalendarEvent],
        sort_index="start_date",
        reverse=True,
        use_cache=False,
        )
    if searchterm is not None:
        query['texts'] = searchterm
    if searchterm is not None and year is None and month is None:
        # all years, all months, don't add anything to the query
        pass
    elif year is not None or month is not None:
        if year is not None:
            year = int(year)
        else:
            # No year given, assume this year
            year = datetime.datetime.now().year
        if month is not None:
            month = int(month)
            last_day = calendar.monthrange(year, month)[1]
            first_moment = coarse_datetime_repr(
                datetime.datetime(year, month, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year, month, last_day, 23, 59, 59))
        else:
            # No month given, search entire year
            first_moment = coarse_datetime_repr(datetime.datetime(year, 1, 1))
            last_moment = coarse_datetime_repr(datetime.datetime(year+1, 1, 1))
        # Overlap test: an event intersects the window when it starts no
        # later than the window's end and ends no earlier than its start.
        query['start_date'] = (None, last_moment)
        query['end_date'] = (first_moment, None)
    else:
        # Show either all future or all past events
        now = coarse_datetime_repr(datetime.datetime.now())
        if past_events:
            # Past; show the most recent first
            query['end_date'] = (None, now)
            query['reverse'] = True
        else:
            # Future; show the soonest first
            query['end_date'] = (now, None)
            query['reverse'] = False
    batch = get_catalog_batch_grid(context, request, **query)
    return batch
def get_grid_data(context, request, start=0, limit=12, sort_on=None,
                  reverse=False, width=GRID_WIDTH):
    """Gets the data for the jquery report grid.

    Returns a dict payload carrying both the legacy karlgrid fields and
    the ux2 slickgrid info, since the caller cannot tell us which UI is
    in play.
    """
    columns = [COLUMNS[colid] for colid in context.columns]
    columns_jsdata = get_column_jsdata(columns, width - SCROLLBAR_WIDTH)
    if sort_on is None:
        # Default to the report's first configured column.
        sort_on = columns[0].id
    sort_index = COLUMNS[sort_on].sort_index
    kw = get_report_query(context, request)
    try:
        batch = get_catalog_batch_grid(context, request, batch_start=start,
                                       batch_size=limit,
                                       sort_index=sort_index,
                                       reverse=reverse, **kw)
    except ParseError:
        # user entered something weird in the text search box.
        # show no results.
        batch = {'entries': [], 'total': 0}
    slickgrid_info = _slickgrid_info_from_ux2_batch(
        context, request, batch, columns, columns_jsdata, sort_on,
        -1 if reverse else 1, kw.get('lastnamestartswith', ''))
    # Unfortunately, I find no good way to conditionally assemble the payload.
    # This means that we are wasting CPU to produce 2 (or 3?) sets of payload.
    records = []
    for profile in batch['entries']:
        record = [col.render_html(profile, request) for col in columns]
        records.append(record)
    # NOTE(review): `kw` is rebound here (query dict -> search qualifiers)
    # and `_` is shadowed locally -- intentional, but easy to misread.
    kw, _ = get_search_qualifiers(request)
    fetch_url = resource_url(context, request, 'jquery_grid', **kw)
    payload = dict(
        fetch_url=fetch_url,
        columns=columns_jsdata,
        records=records,
        totalRecords=batch['total'],
        batchSize=limit,
        width=width,
        sortColumn=sort_on,
        sortDirection=(reverse and 'desc' or 'asc'),
        allocateWidthForScrollbar=True,
        scrollbarWidth=SCROLLBAR_WIDTH,
        batch=batch,  # ux2 with karlgrid (not slickgrid)
        slickgrid_info=slickgrid_info,  # ux2 with karlgrid (slickgrid)
        )
    return payload
def get_recent_items_batch(community, request, size=10):
    """Return the community's most recently modified content as a batch.

    This variant routes through the SQL catalog search
    (``catalog_iface=ISQLCatalogSearch``) and filters visibility via the
    ``can_view`` criterion instead of a path query.
    """
    query = dict(
        interfaces=[ICommunityContent],
        sort_index="modified_date",
        reverse=True,
        batch_size=size,
        community=community,
        can_view={'query': effective_principals(request), 'operator': 'or'},
        catalog_iface=ISQLCatalogSearch,
        )
    return get_catalog_batch_grid(community, request, **query)
def picture_view(context, request):
    """Fetch one page (12 entries) of the report for the picture view.

    NOTE(review): this chunk appears truncated -- ``batch_info`` is
    computed but the response assembly/return is not visible here.
    """
    # Sort by the report's first configured column.
    sort_index = COLUMNS[context.columns[0]].sort_index
    kw = get_report_query(context, request)
    try:
        batch_info = get_catalog_batch_grid(context, request, batch_size=12,
                                            sort_index=sort_index, **kw)
    except ParseError, e:
        # user entered something weird in the text search box.
        # show no results.
        # NOTE(review): `e` is bound but unused.
        batch_info = {"entries": [], "total": 0, "batching_required": False}
def get_grid_data(context, request, start=0, limit=12, sort_on=None,
                  reverse=False, width=GRID_WIDTH):
    """Gets the data for the jquery report grid.

    Like the COLUMNS-based variant, but resolves the column registry at
    call time via ``getColumns()``.  Returns a payload carrying both the
    legacy karlgrid fields and the ux2 slickgrid info.
    """
    all_columns = getColumns()
    columns = [all_columns[colid] for colid in context.columns]
    columns_jsdata = get_column_jsdata(columns, width - SCROLLBAR_WIDTH)
    if sort_on is None:
        # Default to the report's first configured column.
        sort_on = columns[0].id
    sort_index = all_columns[sort_on].sort_index
    kw = get_report_query(context, request)
    try:
        batch = get_catalog_batch_grid(context, request, batch_start=start,
                                       batch_size=limit,
                                       sort_index=sort_index,
                                       reverse=reverse, **kw
                                       )
    except ParseError:
        # user entered something weird in the text search box.
        # show no results.
        batch = {'entries': [], 'total': 0}
    slickgrid_info = _slickgrid_info_from_ux2_batch(
        context, request, batch, columns, columns_jsdata, sort_on,
        -1 if reverse else 1, kw.get('lastnamestartswith', ''))
    # Unfortunately, I find no good way to conditionally assemble the payload.
    # This means that we are wasting CPU to produce 2 (or 3?) sets of payload.
    records = []
    for profile in batch['entries']:
        record = [col.render_html(profile, request) for col in columns]
        records.append(record)
    # NOTE(review): `kw` is rebound here (query dict -> search qualifiers)
    # and `_` is shadowed locally -- intentional, but easy to misread.
    kw, _ = get_search_qualifiers(request)
    fetch_url = resource_url(context, request, 'jquery_grid', **kw)
    payload = dict(
        fetch_url=fetch_url,
        columns=columns_jsdata,
        records=records,
        totalRecords=batch['total'],
        batchSize=limit,
        width=width,
        sortColumn=sort_on,
        sortDirection=(reverse and 'desc' or 'asc'),
        allocateWidthForScrollbar=True,
        scrollbarWidth=SCROLLBAR_WIDTH,
        batch=batch,  # ux2 with karlgrid (not slickgrid)
        slickgrid_info=slickgrid_info,  # ux2 with karlgrid (slickgrid)
        )
    return payload
def get_grid_data(context, request, start=0, limit=12, sort_on=None,
                  reverse=False, width=GRID_WIDTH):
    """Gets the data for the jquery report grid.

    Returns a dict payload for the grid: fetch URL, column metadata,
    rendered record rows, batching info and sort state.
    """
    # BUGFIX: resolve a None context *before* it is used.  The original
    # code only substituted request.context inside the try block, after
    # get_report_columns() and get_report_query() had already been called
    # with the None context.
    if context is None:
        context = request.context
    columns, sort_on, sort_index = get_report_columns(context, request,
                                                      sort_on, width)
    columns_jsdata = get_column_jsdata(columns, width - SCROLLBAR_WIDTH)
    kw = get_report_query(context, request)
    try:
        batch = get_catalog_batch_grid(context, request, batch_start=start,
                                       batch_size=limit,
                                       sort_index=sort_index,
                                       reverse=reverse, **kw)
    except ParseError:
        # user entered something weird in the text search box.
        # show no results.
        batch = {'entries': [], 'total': 0}
    # Unfortunately, I find no good way to conditionally assemble the payload.
    # This means that we are wasting CPU to produce 2 (or 3?) sets of payload.
    records = []
    for profile in batch['entries']:
        record = [col.render_html(profile, request) for col in columns]
        records.append(record)
    kw, _ = get_search_qualifiers(request)
    fetch_url = resource_url(context, request, 'jquery_grid', **kw)
    payload = dict(
        fetch_url=fetch_url,
        columns=columns_jsdata,
        records=records,
        totalRecords=batch['total'],
        batch=batch,
        batchSize=limit,
        width=width,
        sortColumn=sort_on,
        sortDirection=(reverse and 'desc' or 'asc'),
        allocateWidthForScrollbar=True,
        scrollbarWidth=SCROLLBAR_WIDTH,
        )
    return payload
def get_topic_batch(forum, request):
    """Return the forum's topics, newest first, as a grid batch."""
    allowed = {'query': effective_principals(request), 'operator': 'or'}
    return get_catalog_batch_grid(
        forum, request,
        interfaces=[IForumTopic],
        reverse=True,
        path={'query': resource_path(forum)},
        allowed=allowed,
        )
def get_catalog_news(context, request, searchterm=None, year=None, month=None):
    """Query the catalog for news items under ``context``.

    With year and/or month, restricts to that period (capped at 'now');
    otherwise returns everything already published.  Newest first.
    """
    # Build up a query
    query = dict(
        path={'query': resource_path(context)},
        allowed={
            'query': effective_principals(request),
            'operator': 'or'
        },
        interfaces=[INewsItem],
        sort_index="publication_date",
        reverse=True,
        )
    if searchterm is not None:
        query['texts'] = searchterm
    now = coarse_datetime_repr(datetime.datetime.now())
    if year is not None or month is not None:
        if year is not None:
            year = int(year)
        else:
            # No year given, assume this year
            year = datetime.datetime.now().year
        if month is not None:
            month = int(month)
            last_day = calendar.monthrange(year, month)[1]
            first_moment = coarse_datetime_repr(
                datetime.datetime(year, month, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year, month, last_day, 23, 59, 59))
        else:
            # No month given, search entire year
            # NOTE(review): the year window's upper bound is Jan 1 of the
            # next year, so it may include that exact midnight instant.
            first_moment = coarse_datetime_repr(datetime.datetime(year, 1, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year + 1, 1, 1))
        # Never show news items that aren't published yet
        last_moment = min(last_moment, now)
        query['publication_date'] = (first_moment, last_moment)
    else:
        # Don't show news from future
        query['publication_date'] = (None, now)
    batch = get_catalog_batch_grid(context, request, **query)
    return batch
def get_catalog_news(context, request, searchterm=None, year=None, month=None):
    """Query the catalog for news items under ``context``.

    With year and/or month, restricts to that period (capped at 'now');
    otherwise returns everything already published.  Newest first.
    """
    # Build up a query
    query = dict(
        path={'query': resource_path(context)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        interfaces=[INewsItem],
        sort_index="publication_date",
        reverse=True,
        )
    if searchterm is not None:
        query['texts'] = searchterm
    now = coarse_datetime_repr(datetime.datetime.now())
    if year is not None or month is not None:
        if year is not None:
            year = int(year)
        else:
            # No year given, assume this year
            year = datetime.datetime.now().year
        if month is not None:
            month = int(month)
            last_day = calendar.monthrange(year, month)[1]
            first_moment = coarse_datetime_repr(
                datetime.datetime(year, month, 1))
            last_moment = coarse_datetime_repr(
                datetime.datetime(year, month, last_day, 23, 59, 59))
        else:
            # No month given, search entire year
            # NOTE(review): the year window's upper bound is Jan 1 of the
            # next year, so it may include that exact midnight instant.
            first_moment = coarse_datetime_repr(datetime.datetime(year, 1, 1))
            last_moment = coarse_datetime_repr(datetime.datetime(year+1, 1, 1))
        # Never show news items that aren't published yet
        last_moment = min(last_moment, now)
        query['publication_date'] = (first_moment, last_moment)
    else:
        # Don't show news from future
        query['publication_date'] = (None, now)
    batch = get_catalog_batch_grid(context, request, **query)
    return batch
def get_grid_data(context, request, start=0, limit=12, sort_on=None,
                  reverse=False, width=GRID_WIDTH):
    """Gets the data for the jquery report grid.

    Returns a dict payload for the grid: fetch URL, column metadata,
    rendered record rows, batching info and sort state.
    """
    # BUGFIX: resolve a None context *before* it is used.  The original
    # code only substituted request.context inside the try block, after
    # get_report_columns() and get_report_query() had already been called
    # with the None context.
    if context is None:
        context = request.context
    columns, sort_on, sort_index = get_report_columns(
        context, request, sort_on, width)
    columns_jsdata = get_column_jsdata(columns, width - SCROLLBAR_WIDTH)
    kw = get_report_query(context, request)
    try:
        batch = get_catalog_batch_grid(context, request, batch_start=start,
                                       batch_size=limit,
                                       sort_index=sort_index,
                                       reverse=reverse, **kw
                                       )
    except ParseError:
        # user entered something weird in the text search box.
        # show no results.
        batch = {'entries': [], 'total': 0}
    # Unfortunately, I find no good way to conditionally assemble the payload.
    # This means that we are wasting CPU to produce 2 (or 3?) sets of payload.
    records = []
    for profile in batch['entries']:
        record = [col.render_html(profile, request) for col in columns]
        records.append(record)
    kw, _ = get_search_qualifiers(request)
    fetch_url = resource_url(context, request, 'jquery_grid', **kw)
    payload = dict(
        fetch_url=fetch_url,
        columns=columns_jsdata,
        records=records,
        totalRecords=batch['total'],
        batch=batch,
        batchSize=limit,
        width=width,
        sortColumn=sort_on,
        sortDirection=(reverse and 'desc' or 'asc'),
        allocateWidthForScrollbar=True,
        scrollbarWidth=SCROLLBAR_WIDTH,
        )
    return payload
def get_batch(context, request):
    """Return a batch of results and term sequence for a search request.

    If the user provided no terms, the returned batch will be None and
    the term sequence will be empty.
    """
    query, terms = make_query(context, request)
    if not terms:
        return None, terms
    context_path = resource_path(context)
    if context_path and context_path != '/':
        # Scope the search to the current container unless we are at
        # the site root.
        query['path'] = {'query': context_path}
    return get_catalog_batch_grid(context, request, **query), terms
def show_communities_view(context, request):
    """Render the top-level communities listing page."""
    system_name = get_setting(context, 'system_name', 'KARL')
    page_title = '%s Communities' % system_name
    actions = []
    if has_permission('create', context, request):
        actions.append(('Add Community', 'add_community.html'))
    api = TemplateAPI(context, request, page_title)

    # Grab the data for the two listings, main communities and portlet
    query = dict(
        sort_index='title',
        interfaces=[ICommunity],
        path={'query': model_path(context), 'depth': 1},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        )
    letter = request.params.get('titlestartswith')
    if letter:
        query['titlestartswith'] = (letter, letter)
    batch_info = get_catalog_batch_grid(context, request, **query)

    communities = [getMultiAdapter((entry, request), ICommunityInfo)
                   for entry in batch_info['entries']]

    letter_info = ILetterManager(context).get_info(request)
    my_communities = get_my_communities(context, request)

    return render_template_to_response(
        'templates/communities.pt',
        api=api,
        actions=actions,
        communities=communities,
        my_communities=my_communities,
        batch_info=batch_info,
        letters=letter_info,
        )
def _callFUT(self, context, request):
    # Import the function-under-test at call time so an import failure
    # shows up in the test that exercises it.
    from karl.views.batch import get_catalog_batch_grid
    result = get_catalog_batch_grid(context, request)
    return result
def get_batch(self):
    """Run the configured search criteria and return the grid batch."""
    criteria = self._makeCriteria()
    return get_catalog_batch_grid(self.context, self.request, **criteria)
def get_topic_batch(forum, request):
    """Return the forum's topics, newest first, as a grid batch."""
    query = dict(
        interfaces=[IForumTopic],
        reverse=True,
        path={'query': model_path(forum)},
        allowed={'query': effective_principals(request), 'operator': 'or'},
        )
    return get_catalog_batch_grid(forum, request, **query)