def _serviceSearch(self, limit=None, start=None, sort='name', dir='ASC',
                   params=None, uid=None, criteria=()):
    """Search the catalog for ServiceClass objects.

    Optional ``params`` may carry case-insensitive substring filters on
    'name' and/or 'port'; multiple filters are ANDed together.
    """
    tool = ICatalogTool(self._getObject(uid))
    filters = []
    if params:
        for field in ('name', 'port'):
            if field in params:
                filters.append(
                    MatchRegexp(field, '(?i).*%s.*' % params[field]))
    query = And(*filters) if filters else None
    return tool.search("Products.ZenModel.ServiceClass.ServiceClass",
                       start=start, limit=limit, orderby=sort,
                       reverse=(dir == 'DESC'), query=query)
def _getDeviceBatch(self, selectstatus='none', goodevids=(), badevids=(),
                    offset=0, count=50, filter='', orderby='titleOrId',
                    orderdir='asc'):
    """Return the ids of devices under this context matching the filter.

    :param selectstatus: 'all' selects everything except ``badevids``;
        any other value selects only ``goodevids``.
    :param goodevids: id or sequence of ids explicitly selected.
    :param badevids: id or sequence of ids explicitly deselected.
    :param filter: substring matched (case-insensitive) against id,
        name, ip address and device class path.
    :return: list of matching device ids.

    Fixed: the defaults for ``goodevids``/``badevids`` were mutable
    lists; use immutable tuples (callers see identical behavior).
    """
    unused(count, offset, orderby, orderdir)
    # Accept a bare id as well as a sequence of ids.
    if not isinstance(goodevids, (list, tuple)):
        goodevids = [goodevids]
    if not isinstance(badevids, (list, tuple)):
        badevids = [badevids]
    if selectstatus == 'all':
        # Everything except the explicitly deselected ids.
        idquery = ~In('id', badevids)
    else:
        idquery = In('id', goodevids)
    devfilter = '(?is).*%s.*' % filter
    filterquery = Or(MatchRegexp('id', devfilter),
                     MatchRegexp('name', devfilter),
                     MatchRegexp('text_ipAddress', devfilter),
                     MatchRegexp('deviceClassPath', devfilter))
    # Constrain to this organizer's subtree, then apply both filters.
    query = Eq('uid', self.context.absolute_url_path()) & idquery
    query = query & filterquery
    catalog = IModelCatalogTool(self.context)
    objects = catalog.search(query=query)
    return [x['id'] for x in objects]
def _getAdvancedQueryDeviceList(self, offset=0, count=50, filter='',
                                orderby='name', orderdir='asc'):
    """
    Ask the catalog for devices matching the criteria specified.
    """
    context = self.context
    if not isinstance(context, DeviceOrganizer):
        context = self.context.dmd.Devices
    catalog = IModelCatalogTool(context).devices
    pattern = '(?is).*%s.*' % filter
    # Match the filter string against any of the searchable fields.
    name_matches = Or(*[MatchRegexp(field, pattern)
                        for field in ('id', 'name', 'text_ipAddress',
                                      'deviceClassPath')])
    full_query = Eq('uid', context.absolute_url_path()) & name_matches
    brains = list(catalog.search(query=full_query,
                                 start=int(offset),
                                 limit=int(count),
                                 order_by=orderby,
                                 reverse=orderdir != 'asc'))
    # NOTE(review): this is the size of the returned batch, not the
    # overall catalog total — preserved from the original behavior.
    return len(brains), brains
def _processSearch(self, limit=None, start=None, sort='name', dir='ASC',
                   params=None, uid=None, criteria=()):
    """Search the catalog for OSProcessClass entries.

    ``params['name']``, when present, is applied as a case-insensitive
    substring filter.  ``uid`` may be a path string or an object.
    """
    target = self._getObject(uid) if isinstance(uid, basestring) else uid
    tool = ICatalogTool(target)
    filters = []
    if params and 'name' in params:
        filters.append(MatchRegexp('name', '(?i).*%s.*' % params['name']))
    query = And(*filters) if filters else None
    return tool.search("Products.ZenModel.OSProcessClass.OSProcessClass",
                       start=start, limit=limit, orderby=sort,
                       reverse=(dir == 'DESC'), query=query)
def getDeviceBrains(self, uid=None, start=0, limit=50, sort='name',
                    dir='ASC', params=None, hashcheck=None):
    """Return device brains from the catalog.

    Recognized ``params`` keys: 'ipAddress' (subnet range match),
    'deviceClass' (uid substring match), 'productionState' (any of the
    given states).  All other keys become glob filters.
    """
    cat = ICatalogTool(self._getObject(uid))
    if params is None:
        params = {}
    subqueries = []
    globFilters = {}
    for key, value in params.iteritems():
        if key == 'ipAddress':
            ip = ensureIp(value)
            try:
                checkip(ip)
            except IpAddressError:
                # Unparseable address: silently skip this filter.
                continue
            if numbip(ip):
                low, high = getSubnetBounds(ip)
                subqueries.append(Between('ipAddress', str(low), str(high)))
        elif key == 'deviceClass':
            subqueries.append(MatchRegexp('uid', '(?i).*%s.*' % value))
        elif key == 'productionState':
            subqueries.append(
                Or(*[Eq('productionState', str(state)) for state in value]))
        else:
            # Unknown keys are handled as glob filters downstream.
            globFilters[key] = value
    query = And(*subqueries) if subqueries else None
    return cat.search('Products.ZenModel.Device.Device',
                      start=start, limit=limit, orderby=sort,
                      reverse=(dir == 'DESC'), query=query,
                      globFilters=globFilters, hashcheck=hashcheck)
def findMatchingOrganizers(self, organizerClass, organizerPath, userFilter):
    """Return a Generic 'path' query term covering the organizers of
    *organizerClass* under *organizerPath* whose uid matches
    *userFilter* (case-insensitive); None when the regex is invalid or
    nothing matches.
    """
    pattern = '(?i)^%s.*%s.*' % (organizerPath, userFilter)
    if not self.validRegex(pattern):
        return
    implements = 'Products.ZenModel.%s.%s' % (organizerClass, organizerClass)
    orgquery = (Eq('objectImplements', implements)
                & MatchRegexp('uid', pattern))
    brains = ICatalogTool(self._dmd).search(query=orgquery)
    paths = [brain.getPath() for brain in brains]
    if paths:
        return Generic('path', {'query': paths})
def getAffectedServices(self):
    """Generate WinService instances to which this datasource is bound."""
    template = self.rrdTemplate().primaryAq()
    deviceclass = template.deviceClass()
    if deviceclass is None:
        # Template is local to a specific service.
        yield template.getPrimaryParent()
        return
    # Template lives in a device class: query only affected services.
    if template.id == 'WinService':
        query = None
        for expression in self.in_exclusions.split(','):
            regex = expression.strip().lstrip('+-')
            try:
                re.compile(regex)
            except re.error:
                log.debug(INVALID_REGEX.format(self.id, regex))
                continue
            clause = MatchRegexp('id', regex)
            query = clause if query is None else query | clause
        if query is None:
            # Should not occur, but just in case match everything.
            query = MatchRegexp('id', '.*')
    else:
        # Component template for a specific service.
        query = MatchRegexp('id', template.id)
    catalog = ICatalogTool(deviceclass.primaryAq())
    for result in catalog.search(WinService, query=query):
        try:
            service = result.getObject()
        except Exception:
            continue
        if service.getRRDTemplate() == template:
            yield service
def findMatchingOrganizers(self, organizerClass, organizerPath, userFilter):
    """Return an In('path', ...) query term covering the organizers of
    *organizerClass* under *organizerPath* whose uid matches the glob
    built from *userFilter*; None when the pattern is invalid or
    nothing matches.

    Fixed: a leftover regex-style ``filterRegex`` was computed and
    immediately overwritten (dead code) — removed; the redundant
    ``"{0}".format()`` wrapper around ``getPath()`` (already a string)
    is also gone.
    """
    filterRegex = '/zport/dmd/{0}*{1}*'.format(organizerPath, userFilter)
    if self.validRegex(filterRegex):
        orgquery = (Eq('objectImplements',
                       'Products.ZenModel.%s.%s' % (organizerClass,
                                                    organizerClass))
                    & MatchRegexp('uid', filterRegex))
        paths = [b.getPath()
                 for b in IModelCatalogTool(self._dmd).search(query=orgquery)]
        if paths:
            return In('path', paths)
def getWatchListTargets(self, uid, query=""):
    """Return sub-organizers of *uid* (filtered by *query*) plus up to
    50 device infos the current user may view, matched by titleOrId.
    """
    results = self.getSubOrganizers(uid)
    device_query = None
    if query:
        needle = query.lower()
        results = [org for org in results
                   if needle in org.fullOrganizerName.lower()]
        device_query = MatchRegexp("titleOrId", ".*" + query + ".*")
    brains = self._search_device_catalog(device_query)
    # Lazily wake devices, keeping only those the user can view.
    infos = (IInfo(dev)
             for dev in (brain.getObject() for brain in brains)
             if dev.checkRemotePerm(ZEN_VIEW, dev))
    results.extend(itertools.islice(infos, 50))
    return results
def _process_request(self):
    """Read listing parameters from the request and configure this view.

    Configures ``self.contentFilter`` (catalog query), sorting
    (``sort_on`` / ``manual_sort_on`` / ``sort_order``), batching
    (``pagesize`` / ``pagenumber``), the selected review state, and the
    AdvancedQuery fragments ``self.And`` / ``self.Or`` built from
    per-index request variables.  Returns '' when a table_only redraw
    targets a different form.
    """
    # Use this function from a template that is using bika_listing_table
    # in such a way that the table_only request var will be used to
    # in-place-update the table.
    form_id = self.form_id
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    catalog = getToolByName(self.context, self.catalog)
    # Some ajax calls duplicate form values? I have not figured out why!
    if self.request.form:
        for key, value in self.request.form.items():
            if isinstance(value, list):
                self.request.form[key] = self.request.form[key][0]
    # If table_only specifies another form_id, then we abort.
    # this way, a single table among many can request a redraw,
    # and only it's content will be rendered.
    if form_id not in self.request.get('table_only', form_id):
        return ''

    ## review_state_selector - value can be specified in request
    selected_state = self.request.get("%s_review_state" % form_id, 'default')
    # get review_state id=selected_state
    states = [r for r in self.review_states if r['id'] == selected_state]
    self.review_state = states and states[0] or self.review_states[0]
    # set selected review_state ('default'?) to request
    self.request['review_state'] = self.review_state['id']

    # contentFilter is expected in every self.review_state.
    for k, v in self.review_state['contentFilter'].items():
        self.contentFilter[k] = v

    # sort on
    self.sort_on = self.request.get(form_id + '_sort_on', None)
    # manual_sort_on: only sort the current batch of items
    # this is a compromise for sorting without column indexes
    self.manual_sort_on = None
    if self.sort_on \
       and self.sort_on in self.columns.keys() \
       and self.columns[self.sort_on].get('index', None):
        # The column is backed by a catalog index: let the catalog sort.
        idx = self.columns[self.sort_on].get('index', self.sort_on)
        self.contentFilter['sort_on'] = idx
    else:
        if self.sort_on:
            # No index for this column: sort the batch in python instead.
            self.manual_sort_on = self.sort_on
            if 'sort_on' in self.contentFilter:
                del self.contentFilter['sort_on']

    # sort order
    self.sort_order = self.request.get(form_id + '_sort_order', '')
    if self.sort_order:
        self.contentFilter['sort_order'] = self.sort_order
    else:
        if 'sort_order' not in self.contentFilter:
            self.sort_order = 'ascending'
            self.contentFilter['sort_order'] = 'ascending'
            self.request.set(form_id + '_sort_order', 'ascending')
        else:
            self.sort_order = self.contentFilter['sort_order']
    # A manual (python) sort makes the catalog sort_order meaningless.
    if self.manual_sort_on:
        del self.contentFilter['sort_order']

    # pagesize
    pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
    if type(pagesize) in (list, tuple):
        pagesize = pagesize[0]
    try:
        pagesize = int(pagesize)
    except:
        # Non-numeric request value: fall back to a default of 10.
        pagesize = self.pagesize = 10
    self.pagesize = pagesize
    # Plone's batching wants this variable:
    self.request.set('pagesize', self.pagesize)
    # and we want to make our choice remembered in bika_listing also
    self.request.set(self.form_id + '_pagesize', self.pagesize)

    # pagenumber
    self.pagenumber = int(
        self.request.get(form_id + '_pagenumber', self.pagenumber))
    # Plone's batching wants this variable:
    self.request.set('pagenumber', self.pagenumber)

    # index filters.
    self.And = []
    self.Or = []
    ##logger.info("contentFilter: %s"%self.contentFilter)
    for k, v in self.columns.items():
        if not v.has_key('index') \
           or v['index'] == 'review_state' \
           or v['index'] in self.filter_indexes:
            continue
        self.filter_indexes.append(v['index'])
    ##logger.info("Filter indexes: %s"%self.filter_indexes)

    # any request variable named ${form_id}_{index_name}
    # will pass it's value to that index in self.contentFilter.
    # all conditions using ${form_id}_{index_name} are searched with AND
    for index in self.filter_indexes:
        idx = catalog.Indexes.get(index, None)
        if not idx:
            logger.debug("index named '%s' not found in %s. "
                         "(Perhaps the index is still empty)." %
                         (index, self.catalog))
            continue
        request_key = "%s_%s" % (form_id, index)
        value = self.request.get(request_key, '')
        if len(value) > 1:
            ##logger.info("And: %s=%s"%(index, value))
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.And.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                logger.info("Unhandled DateIndex search on '%s'" % index)
                continue
            else:
                self.Or.append(Generic(index, value))

    # if there's a ${form_id}_filter in request, then all indexes
    # are are searched for it's value.
    # ${form_id}_filter is searched with OR agains all indexes
    request_key = "%s_filter" % form_id
    value = self.request.get(request_key, '')
    if type(value) in (list, tuple):
        value = value[0]
    if len(value) > 1:
        for index in self.filter_indexes:
            idx = catalog.Indexes.get(index, None)
            if not idx:
                logger.debug("index named '%s' not found in %s. "
                             "(Perhaps the index is still empty)."
                             % (index, self.catalog))
                continue
            ##logger.info("Or: %s=%s"%(index, value))
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.Or.append(MatchRegexp(index, value))
                # https://github.com/bikalabs/Bika-LIMS/issues/1069
                # Also match every dash-joined prefix of the value.
                vals = value.split('-')
                if len(vals) > 2:
                    valroot = vals[0]
                    for i in range(1, len(vals)):
                        valroot = '%s-%s' % (valroot, vals[i])
                        self.Or.append(MatchRegexp(index, valroot + '-*'))
            elif idx.meta_type == 'DateIndex':
                if type(value) in (list, tuple):
                    value = value[0]
                if value.find(":") > -1:
                    try:
                        lohi = [DateTime(x) for x in value.split(":")]
                    except:
                        logger.info("Error (And, DateIndex='%s', term='%s')"
                                    % (index, value))
                    # NOTE(review): if the DateTime parse above failed,
                    # 'lohi' is unbound here and this raises NameError.
                    self.Or.append(Between(index, lohi[0], lohi[1]))
                else:
                    try:
                        self.Or.append(Eq(index, DateTime(value)))
                    except:
                        logger.info("Error (Or, DateIndex='%s', term='%s')"
                                    % (index, value))
            else:
                self.Or.append(Generic(index, value))
        self.Or.append(MatchRegexp('review_state', value))

    # get toggle_cols cookie value
    # and modify self.columns[]['toggle'] to match.
    toggle_cols = self.get_toggle_cols()
    for col in self.columns.keys():
        if col in toggle_cols:
            self.columns[col]['toggle'] = True
        else:
            self.columns[col]['toggle'] = False
def search(self, types=(), start=0, limit=None, orderby=None, reverse=False,
           paths=(), depth=None, query=None, hashcheck=None,
           filterPermissions=True, globFilters=None):
    """Search the catalog, optionally post-filtering and post-sorting in
    python when the requested ordering or a glob filter has no index.

    :param types: object types (objectImplements) to search for.
    :param orderby: index name to sort on; non-index names force a
        python-side sort of unbrained results.
    :param globFilters: dict of field: substring filters; indexed fields
        are folded into the catalog query, the rest wake objects up.
    :param hashcheck: expected result-count hash from a previous search;
        mismatch raises StaleResultsException.
    :return: SearchResults wrapping an islice of the matches.
    """
    # if orderby is not an index then _queryCatalog, then query results
    # will be unbrained and sorted
    areBrains = orderby in self.catalog._catalog.indexes or orderby is None
    queryOrderby = orderby if areBrains else None
    infoFilters = {}
    if globFilters:
        for key, value in globFilters.iteritems():
            if self.catalog.hasIndexForTypes(types, key):
                # Indexed field: fold a case-insensitive substring match
                # into the catalog query itself.
                if query:
                    query = And(query,
                                MatchRegexp(key, '(?i).*%s.*' % value))
                else:
                    query = MatchRegexp(key, '(?i).*%s.*' % value)
            else:
                # Not indexed: must wake objects, so results unbrain.
                areBrains = False
                infoFilters[key] = value
    try:
        queryResults = self._queryCatalog(types, queryOrderby, reverse,
                                          paths, depth, query,
                                          filterPermissions)
    except sre_constants.error:
        # if there is an invalid regex in the query return an empty list
        log.error("Invalid regex in the following query: %s" % query)
        queryResults = []
    # see if we need to filter by waking up every object
    if infoFilters:
        queryResults = self._filterQueryResults(queryResults, infoFilters)
    totalCount = len(queryResults)
    # The "hash" is simply the result count before slicing.
    hash_ = totalCount
    if areBrains or not queryResults:
        allResults = queryResults
    else:
        allResults = self._sortQueryResults(queryResults, orderby, reverse)
    if hashcheck is not None:
        if hash_ != int(hashcheck):
            raise StaleResultsException("Search results do not match")
    # Return a slice
    start = max(start, 0)
    if limit is None:
        stop = None
    else:
        stop = start + limit
    results = islice(allResults, start, stop)
    return SearchResults(results, totalCount, str(hash_), areBrains)
def _process_request(self):
    """Scan request for parameters and configure class attributes
    accordingly.  Setup AdvancedQuery or catalog contentFilter.

    Request parameters:
    <form_id>_limit_from: index of the first item to display
    <form_id>_rows_only: returns only the rows
    <form_id>_sort_on: list items are sorted on this key
    <form_id>_manual_sort_on: no index - sort with python
    <form_id>_pagesize: number of items
    <form_id>_filter: A string, will be regex matched against
        indexes in <form_id>_filter_indexes
    <form_id>_filter_indexes: list of index names which will be
        searched for the value of <form_id>_filter
    <form_id>_<index_name>: Any index name can be used after <form_id>_.
        any request variable named ${form_id}_{index_name} will pass
        it's value to that index in self.contentFilter.
        All conditions using ${form_id}_{index_name} are searched
        with AND.  The parameter value will be matched with regexp
        if a FieldIndex or TextIndex.  Else, AdvancedQuery.Generic
        is used.
    """
    form_id = self.form_id
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    catalog = getToolByName(self.context, self.catalog)
    # Some ajax calls duplicate form values? I have not figured out why!
    if self.request.form:
        for key, value in self.request.form.items():
            if isinstance(value, list):
                self.request.form[key] = self.request.form[key][0]
    # If table_only specifies another form_id, then we abort.
    # this way, a single table among many can request a redraw,
    # and only it's content will be rendered.
    if form_id not in self.request.get('table_only', form_id) \
       or form_id not in self.request.get('rows_only', form_id):
        return ''
    self.rows_only = self.request.get('rows_only', '') == form_id
    self.limit_from = int(self.request.get(form_id + '_limit_from', 0))

    # contentFilter is allowed in every self.review_state.
    for k, v in self.review_state.get('contentFilter', {}).items():
        self.contentFilter[k] = v

    # sort on
    self.sort_on = self.sort_on \
        if hasattr(self, 'sort_on') and self.sort_on \
        else None
    self.sort_on = self.request.get(form_id + '_sort_on', self.sort_on)
    self.sort_order = self.request.get(form_id + '_sort_order', 'ascending')
    self.manual_sort_on = self.request.get(form_id + '_manual_sort_on', None)
    if self.sort_on:
        if self.sort_on in self.columns.keys():
            if self.columns[self.sort_on].get('index', None):
                self.request.set(form_id + '_sort_on', self.sort_on)
                # The column can be sorted directly using an index
                idx = self.columns[self.sort_on]['index']
                self.sort_on = idx
                # Don't sort manually!
                self.manual_sort_on = None
            else:
                # The column must be manually sorted using python
                self.manual_sort_on = self.sort_on
        else:
            # We cannot sort for a column that doesn't exist!
            msg = "{}: sort_on is '{}', not a valid column".format(
                self, self.sort_on)
            logger.error(msg)
            self.sort_on = None
    if self.manual_sort_on:
        self.manual_sort_on = self.manual_sort_on[0] \
            if type(self.manual_sort_on) in (list, tuple) \
            else self.manual_sort_on
        if self.manual_sort_on not in self.columns.keys():
            # We cannot sort for a column that doesn't exist!
            msg = "{}: manual_sort_on is '{}', not a valid column".format(
                self, self.manual_sort_on)
            logger.error(msg)
            self.manual_sort_on = None
    if self.sort_on or self.manual_sort_on:
        # By default, if sort_on is set, sort the items ASC
        # Trick to allow 'descending' keyword instead of 'reverse'
        self.sort_order = 'reverse' if self.sort_order \
            and self.sort_order[0] in ['d', 'r'] \
            else 'ascending'
    else:
        # By default, sort on created
        self.sort_order = 'reverse'
        self.sort_on = 'created'
    self.contentFilter['sort_order'] = self.sort_order
    if self.sort_on:
        self.contentFilter['sort_on'] = self.sort_on

    # pagesize
    pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
    if type(pagesize) in (list, tuple):
        pagesize = pagesize[0]
    try:
        pagesize = int(pagesize)
    except:
        # Non-numeric request value: fall back to a default of 10.
        pagesize = self.pagesize = 10
    self.pagesize = pagesize
    # Plone's batching wants this variable:
    self.request.set('pagesize', self.pagesize)
    # and we want to make our choice remembered in bika_listing also
    self.request.set(self.form_id + '_pagesize', self.pagesize)

    # index filters.
    self.And = []
    self.Or = []
    ##logger.info("contentFilter: %s"%self.contentFilter)
    for k, v in self.columns.items():
        if not v.has_key('index') \
           or v['index'] == 'review_state' \
           or v['index'] in self.filter_indexes:
            continue
        self.filter_indexes.append(v['index'])
    ##logger.info("Filter indexes: %s"%self.filter_indexes)

    # any request variable named ${form_id}_{index_name}
    # will pass it's value to that index in self.contentFilter.
    # all conditions using ${form_id}_{index_name} are searched with AND
    for index in self.filter_indexes:
        idx = catalog.Indexes.get(index, None)
        if not idx:
            logger.debug("index named '%s' not found in %s. "
                         "(Perhaps the index is still empty)."
                         % (index, self.catalog))
            continue
        request_key = "%s_%s" % (form_id, index)
        value = self.request.get(request_key, '')
        if len(value) > 1:
            ##logger.info("And: %s=%s"%(index, value))
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.And.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                logger.info("Unhandled DateIndex search on '%s'" % index)
                continue
            else:
                self.Or.append(Generic(index, value))

    # if there's a ${form_id}_filter in request, then all indexes
    # are are searched for it's value.
    # ${form_id}_filter is searched with OR agains all indexes
    request_key = "%s_filter" % form_id
    value = self.request.get(request_key, '')
    if type(value) in (list, tuple):
        value = value[0]
    if len(value) > 1:
        for index in self.filter_indexes:
            idx = catalog.Indexes.get(index, None)
            if not idx:
                logger.debug("index named '%s' not found in %s. "
                             "(Perhaps the index is still empty)."
                             % (index, self.catalog))
                continue
            ##logger.info("Or: %s=%s"%(index, value))
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.Or.append(MatchRegexp(index, value))
                self.expand_all_categories = True
                # https://github.com/bikalabs/Bika-LIMS/issues/1069
                # Also match every dash-joined prefix of the value.
                vals = value.split('-')
                if len(vals) > 2:
                    valroot = vals[0]
                    for i in range(1, len(vals)):
                        valroot = '%s-%s' % (valroot, vals[i])
                        self.Or.append(MatchRegexp(index, valroot + '-*'))
                        self.expand_all_categories = True
            elif idx.meta_type == 'DateIndex':
                if type(value) in (list, tuple):
                    value = value[0]
                if value.find(":") > -1:
                    try:
                        lohi = [DateTime(x) for x in value.split(":")]
                    except:
                        logger.info("Error (And, DateIndex='%s', term='%s')"
                                    % (index, value))
                    # NOTE(review): if the DateTime parse above failed,
                    # 'lohi' is unbound here and this raises NameError.
                    self.Or.append(Between(index, lohi[0], lohi[1]))
                    self.expand_all_categories = True
                else:
                    try:
                        self.Or.append(Eq(index, DateTime(value)))
                        self.expand_all_categories = True
                    except:
                        logger.info("Error (Or, DateIndex='%s', term='%s')"
                                    % (index, value))
            else:
                self.Or.append(Generic(index, value))
                self.expand_all_categories = True
        self.Or.append(MatchRegexp('review_state', value))

    # get toggle_cols cookie value
    # and modify self.columns[]['toggle'] to match.
    toggle_cols = self.get_toggle_cols()
    for col in self.columns.keys():
        if col in toggle_cols:
            self.columns[col]['toggle'] = True
        else:
            self.columns[col]['toggle'] = False
def run(self, dmd):
    """Run the report, returning an Availability object for each device.

    Finds matching devices via the catalog, then queries ZEP for open
    and closed events inside the report window and accumulates per-device
    downtime.

    Fixed: the except-branch called ``rf2.write(fmt, arg)`` with two
    arguments (``file.write`` takes exactly one string, so it raised
    TypeError); the ``rf2`` debug log file was also never closed.
    """
    # Note: we don't handle overlapping "down" events, so down
    # time could get get double-counted.
    __pychecker__ = 'no-local'
    rf2Filename = zenhome + '/log/AvailabilityRep2.out'
    rf2 = open(rf2Filename, 'w')
    rf2.write('CReport - in run\n')
    now = time.time()
    zep = getFacade("zep", dmd)
    rf2.write('CReport - start of run \n')
    path = '/zport/dmd/'
    # Path filters restrict the device search to the selected organizers.
    pathFilterList = [
        Generic('path',
                {'query': ''.join([path, 'Devices',
                                   self.DeviceClass or ''])})]
    if self.Location:
        pathFilterList.append(
            Generic('path',
                    {'query': ''.join([path, 'Locations', self.Location])}))
    if self.System:
        pathFilterList.append(
            Generic('path',
                    {'query': ''.join([path, 'Systems', self.System])}))
    if self.DeviceGroup:
        pathFilterList.append(
            Generic('path',
                    {'query': ''.join([path, 'Groups', self.DeviceGroup])}))
    if self.device:
        # For regex match of device name
        pathFilterList.append(
            Or(MatchRegexp('name', '(?i).*%s.*' % self.device),
               MatchRegexp('id', '(?i).*%s.*' % self.device)))
    results = ICatalogTool(dmd.Devices).search(
        types='Products.ZenModel.Device.Device',
        query=And(*pathFilterList))
    rf2.write('pathFilterList is %s \n' % (pathFilterList))
    rf2.write('results is %s \n' % (results))
    if not results.total:
        rf2.close()
        return []
    deviceList = {}
    tag_uuids = []
    accumulator = defaultdict(int)
    for brain in results:
        try:
            obj = brain.getObject()
            deviceList[obj.id] = obj
            tag_uuids.append(brain.uuid)
            accumulator[obj.id] = 0
            rf2.write("obj is %s uuid is %s \n" % (obj, brain.uuid))
        except Exception:
            log.warn("Unable to unbrain at path %s", brain.getPath())
            # file.write takes a single string; build it with %.
            rf2.write("Unable to unbrain at path %s \n" % brain.getPath())
    endDate = self.endDate or AvailabilityColl.getDefaultAvailabilityEnd()
    endDate = min(endDate, now)
    startDate = self.startDate
    if not startDate:
        startDate = AvailabilityColl.getDefaultAvailabilityStart(dmd)
    # convert start and end date to integer milliseconds for defining
    # filters
    startDate = int(startDate * 1000)
    endDate = int(endDate * 1000)
    total_report_window = endDate - startDate
    create_filter_args = {
        'operator': zep.AND,
        'severity': _severityGreaterThanOrEqual(self.severity),
        'event_class': self.eventClass +
            ('/' if not self.eventClass.endswith('/') else '')
    }
    if self.agent:
        create_filter_args['agent'] = self.agent
    if self.monitor is not None:
        create_filter_args['monitor'] = self.monitor
    rf2.write(
        'device is %s, component is %s, location is %s priority is %s, '
        'prodState is %s \n'
        % (self.device, self.component, self.Location,
           self.DevicePriority, self.prodState))
    # add filters on details
    filter_details = {}
    if self.DevicePriority is not None:
        filter_details[
            'zenoss.device.priority'] = "%d:" % self.DevicePriority
    if self.prodState:
        filter_details[
            'zenoss.device.production_state'] = "%d:" % self.prodState
    if filter_details:
        create_filter_args['details'] = filter_details
    create_filter_args['tags'] = tag_uuids
    # query zep for matching event summaries
    # 1. get all open events that:
    #    - first_seen < endDate
    #    (only need to check active events)
    # 2. get all closed events that:
    #    - first_seen < endDate
    #    - status_change > startDate
    #    (must get both active and archived events)
    # 1. get open events
    create_filter_args['first_seen'] = (0, endDate)
    create_filter_args['status'] = OPEN_EVENT_STATUSES
    rf2.write(' create_filter_args dictionary for open events is %s \n'
              % (create_filter_args))
    event_filter = zep.createEventFilter(**create_filter_args)
    open_events = zep.getEventSummariesGenerator(event_filter)
    # 2. get closed events
    create_filter_args['status_change'] = (startDate + 1, )
    create_filter_args['status'] = CLOSED_EVENT_STATUSES
    rf2.write(' create_filter_args dictionary for closed events is %s \n'
              % (create_filter_args))
    event_filter = zep.createEventFilter(**create_filter_args)
    closed_events = zep.getEventSummariesGenerator(event_filter)
    # must also get events from archive
    closed_events_from_archive = zep.getEventSummariesGenerator(
        event_filter, archive=True)
    # Don't put print / log statements in the next block
    # Previous block uses a Python generator function to deliver events
    # asynchronously
    for evtsumm in chain(open_events, closed_events,
                         closed_events_from_archive):
        first = evtsumm['first_seen_time']
        # if event is still open, downtime persists til end of report window
        if evtsumm['status'] not in CLOSED_EVENT_STATUSES:
            last = endDate
        else:
            last = evtsumm['status_change_time']
        # discard any events that have no elapsed time
        if first == last:
            continue
        # clip first and last within report time window
        first = max(first, startDate)
        last = min(last, endDate)
        evt = evtsumm['occurrence'][0]
        evt_actor = evt['actor']
        device = evt_actor.get('element_identifier')
        accumulator[device] += (last - first)
    availabilityReport = []
    for deviceId, downtime in sorted(accumulator.items()):
        device = deviceList.get(deviceId, None)
        if device:
            sysname = device.getSystemNamesString()
            groupname = ', '.join(device.getDeviceGroupNames())
            loc = device.getLocationName()
            dclass = device.getDeviceClassPath()
            link = device.getDeviceLink()
            title = device.titleOrId()
            availabilityReport.append(
                AvailabilityColl(deviceId, '', downtime,
                                 total_report_window, groupname, sysname,
                                 loc, dclass, link, title))
            # Drop the ghost from the ZODB cache to bound memory use.
            device._p_invalidate()
    rf2.close()
    return availabilityReport
def _process_request(self):
    """Read listing parameters from the request and configure this view.

    Like the other bika_listing variants, but the selected review state
    is persisted in a JSON 'review_state' cookie keyed by portal_type +
    form_id.  Returns '' when a table_only redraw targets another form.
    """
    # Use this function from a template that is using bika_listing_table
    # in such a way that the table_only request var will be used to
    # in-place-update the table.
    form_id = self.form_id
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    catalog = getToolByName(self.context, self.catalog)
    # If table_only specifies another form_id, then we abort.
    # this way, a single table among many can request a redraw,
    # and only it's content will be rendered.
    if form_id not in self.request.get('table_only', form_id):
        return ''

    ## review_state_selector
    cookie = json.loads(self.request.get("review_state", '{}'))
    cookie_key = "%s%s" % (self.context.portal_type, form_id)
    # first check POST
    selected_state = self.request.get("%s_review_state" % form_id, '')
    if not selected_state:
        # then check cookie
        selected_state = cookie.get(cookie_key, 'default')
    # get review_state id=selected_state
    states = [r for r in self.review_states if r['id'] == selected_state]
    review_state = states and states[0] or self.review_states[0]
    # set request and cookie to currently selected state id
    if not selected_state:
        selected_state = self.review_states[0]['id']
    self.review_state = cookie[cookie_key] = selected_state
    cookie = json.dumps(cookie)
    self.request['review_state'] = cookie
    self.request.response.setCookie('review_state', cookie, path="/")

    # contentFilter is expected in every review_state.
    for k, v in review_state['contentFilter'].items():
        self.contentFilter[k] = v

    # sort on
    sort_on = self.request.get(form_id + '_sort_on', '')
    # manual_sort_on: only sort the current batch of items
    # this is a compromise for sorting without column indexes
    self.manual_sort_on = None
    if sort_on \
       and sort_on in self.columns.keys() \
       and self.columns[sort_on].get('index', None):
        # The column is backed by a catalog index: let the catalog sort.
        idx = self.columns[sort_on].get('index', sort_on)
        self.contentFilter['sort_on'] = idx
    else:
        if sort_on:
            # No index for this column: sort the batch in python instead.
            self.manual_sort_on = sort_on
            if 'sort_on' in self.contentFilter:
                del self.contentFilter['sort_on']

    # sort order
    self.sort_order = self.request.get(form_id + '_sort_order', '')
    if self.sort_order:
        self.contentFilter['sort_order'] = self.sort_order
    else:
        if 'sort_order' not in self.contentFilter:
            self.sort_order = 'ascending'
            self.contentFilter['sort_order'] = 'ascending'
            self.request.set(form_id + '_sort_order', 'ascending')
        else:
            self.sort_order = self.contentFilter['sort_order']
    # A manual (python) sort makes the catalog sort_order meaningless.
    if self.manual_sort_on:
        del self.contentFilter['sort_order']

    # pagesize
    pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
    if type(pagesize) in (list, tuple):
        pagesize = pagesize[0]
    try:
        pagesize = int(pagesize)
    except:
        # Non-numeric request value: keep the current pagesize.
        pagesize = self.pagesize
    self.pagesize = pagesize
    # Plone's batching wants this variable:
    self.request.set('pagesize', self.pagesize)

    # pagenumber
    self.pagenumber = int(
        self.request.get(form_id + '_pagenumber', self.pagenumber))
    # Plone's batching wants this variable:
    self.request.set('pagenumber', self.pagenumber)

    # index filters.
    self.And = []
    self.Or = []
    ##logger.info("contentFilter: %s"%self.contentFilter)
    for k, v in self.columns.items():
        if not v.has_key('index') \
           or v['index'] == 'review_state' \
           or v['index'] in self.filter_indexes:
            continue
        self.filter_indexes.append(v['index'])
    ##logger.info("Filter indexes: %s"%self.filter_indexes)

    # any request variable named ${form_id}_{index_name}
    # will pass it's value to that index in self.contentFilter.
    # all conditions using ${form_id}_{index_name} are searched with AND
    for index in self.filter_indexes:
        idx = catalog.Indexes.get(index, None)
        if not idx:
            logger.debug("index named '%s' not found in %s. "
                         "(Perhaps the index is still empty)."
                         % (index, self.catalog))
            continue
        request_key = "%s_%s" % (form_id, index)
        value = self.request.get(request_key, '')
        if len(value) > 1:
            ##logger.info("And: %s=%s"%(index, value))
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.And.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                logger.info("Unhandled DateIndex search on '%s'" % index)
                continue
            else:
                self.Or.append(Generic(index, value))

    # if there's a ${form_id}_filter in request, then all indexes
    # are are searched for it's value.
    # ${form_id}_filter is searched with OR agains all indexes
    request_key = "%s_filter" % form_id
    value = self.request.get(request_key, '')
    if len(value) > 1:
        for index in self.filter_indexes:
            idx = catalog.Indexes.get(index, None)
            if not idx:
                logger.debug("index named '%s' not found in %s. "
                             "(Perhaps the index is still empty)."
                             % (index, self.catalog))
                continue
            ##logger.info("Or: %s=%s"%(index, value))
            if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                self.Or.append(MatchRegexp(index, value))
            elif idx.meta_type == 'DateIndex':
                if value.find(":") > -1:
                    try:
                        lohi = [DateTime(x) for x in value.split(":")]
                    except:
                        logger.info(
                            "Error (And, DateIndex='%s', term='%s')"
                            % (index, value))
                    # NOTE(review): if the DateTime parse above failed,
                    # 'lohi' is unbound here and this raises NameError.
                    self.Or.append(Between(index, lohi[0], lohi[1]))
                else:
                    try:
                        self.Or.append(Eq(index, DateTime(value)))
                    except:
                        logger.info(
                            "Error (Or, DateIndex='%s', term='%s')"
                            % (index, value))
            else:
                self.Or.append(Generic(index, value))
        self.Or.append(MatchRegexp('review_state', value))

    # get toggle_cols cookie value
    # and modify self.columns[]['toggle'] to match.
    toggle_cols = self.get_toggle_cols()
    for col in self.columns.keys():
        if col in toggle_cols:
            self.columns[col]['toggle'] = True
        else:
            self.columns[col]['toggle'] = False
def _build_query(self, types=(), paths=(), depth=None, query=None, filterPermissions=True, globFilters=None):
    """
    Build an AdvancedQuery query.

    @param types: list/tuple of values for the objectImplements field
    @param paths: path(s) to restrict the search to. When paths is
        False no path condition at all is added (not even the context
        uid filter)
    @param depth: currently unused (see TODO below)
    @param query: AdvancedQuery passed by the user. Most of the time None
    @param filterPermissions: Boolean indicating whether to check for
        user perms or not
    @param globFilters: dict with user passed field: value filters
    @return: tuple (AdvancedQuery query, not indexed filters dict)
    """
    indexed, stored, _ = self.model_catalog_client.get_indexes()
    not_indexed_user_filters = {}  # Filters that use not indexed fields

    # Partial queries are ANDed together at the end; the order in which
    # they are appended is preserved from the original implementation.
    partial_queries = []

    # User supplied AdvancedQuery, normalized first
    if query:
        partial_queries.append(self._parse_user_query(query))

    # Build query from filters passed by user. Filters on fields that
    # are not indexed are handed back to the caller for post-filtering.
    user_filters_query = None
    if globFilters:
        for key, value in globFilters.iteritems():
            if key in indexed:
                if user_filters_query:
                    user_filters_query = And(user_filters_query,
                                             MatchRegexp(key, value))
                else:
                    user_filters_query = MatchRegexp(key, value)
            else:
                not_indexed_user_filters[key] = value
    if user_filters_query:
        partial_queries.append(user_filters_query)

    # Build the objectImplements query
    if not isinstance(types, (tuple, list)):
        types = (types, )
    types_query_list = [Eq('objectImplements', dottedname(t)) for t in types]
    if types_query_list:
        if len(types_query_list) > 1:
            partial_queries.append(Or(*types_query_list))
        else:
            partial_queries.append(types_query_list[0])

    # Build query for paths
    if paths is not False:  # When paths is False we dont add any path condition
        # TODO: Account for depth or get rid of it
        # TODO: Consider indexing the device's uid as a path
        context_path = '/'.join(self.context.getPrimaryPath())
        # Add the context uid as filter
        partial_queries.append(In('path', (context_path, )))
        if paths:
            if isinstance(paths, basestring):
                paths = (paths, )
            partial_queries.append(In('path', paths))

    # Filter based on permissions. Compute the allowed roles/groups only
    # once instead of calling allowedRolesAndGroups twice.
    if filterPermissions:
        allowed = allowedRolesAndGroups(self.context)
        if allowed:
            partial_queries.append(In('allowedRolesAndUsers', allowed))

    # Put together all queries
    search_query = And(*partial_queries)
    return (search_query, not_indexed_user_filters)
def __call__(self, result=None, specification=None, **kwargs):
    """Return catalog brains matching the widget's search parameters.

    Combines the widget's base_query with the user supplied search_query
    and searchTerm. Indexed search fields are queried through
    AdvancedQuery; fields without a catalog index fall back to manual,
    in-Python filtering of the resulting brains.
    """
    searchTerm = _c(self.request.get('searchTerm', '')).lower()
    force_all = self.request.get('force_all', 'false')
    searchFields = 'search_fields' in self.request \
        and json.loads(_u(self.request.get('search_fields', '[]'))) \
        or ('Title',)
    # lookup objects from ZODB
    catalog_name = _c(self.request.get('catalog_name', 'portal_catalog'))
    catalog = getToolByName(self.context, catalog_name)

    # json.loads does unicode conversion, which will fail in the catalog
    # search for some cases. So we need to convert the strings to utf8
    # see: https://github.com/senaite/bika.lims/issues/443
    base_query = json.loads(self.request['base_query'])
    search_query = json.loads(self.request.get('search_query', "{}"))
    base_query = self.to_utf8(base_query)
    search_query = self.to_utf8(search_query)

    # first with all queries
    contentFilter = dict((k, v) for k, v in base_query.items())
    contentFilter.update(search_query)

    # Sorted by? (by default, Title)
    sort_on = self.request.get('sidx', 'Title')
    if sort_on == 'Title':
        sort_on = 'sortable_title'
    if sort_on:
        # Check if is an index and if is sortable. Otherwise, assume the
        # sorting must be done manually
        index = catalog.Indexes.get(sort_on, None)
        if index and index.meta_type in ['FieldIndex', 'DateIndex']:
            contentFilter['sort_on'] = sort_on

    # Sort order?
    sort_order = self.request.get('sord', 'asc')
    if (sort_order in ['desc', 'reverse', 'rev', 'descending']):
        contentFilter['sort_order'] = 'descending'
    else:
        contentFilter['sort_order'] = 'ascending'

    # Can do a search for indexes?
    criterias = []
    fields_wo_index = []
    if searchTerm:
        for field_name in searchFields:
            index = catalog.Indexes.get(field_name, None)
            if not index:
                fields_wo_index.append(field_name)
                continue
            # NOTE: the membership tests below need one-element TUPLES.
            # The previous ('ZCTextIndex') / ('FieldIndex') forms were
            # plain strings, turning `in` into a substring check.
            if index.meta_type in ('ZCTextIndex',):
                if searchTerm.isspace():
                    # searchTerm != ' ' added because of
                    # https://github.com/plone/Products.CMFPlone/issues
                    # /1537
                    searchTerm = ''
                    continue
                else:
                    temp_st = searchTerm + '*'
                    criterias.append(MatchRegexp(field_name, temp_st))
            elif index.meta_type in ('FieldIndex',):
                criterias.append(MatchRegexp(field_name, searchTerm))
            elif index.meta_type == 'DateIndex':
                msg = "Unhandled DateIndex search on '%s'" % field_name
                from bika.lims import logger
                logger.warn(msg)
            else:
                criterias.append(Generic(field_name, searchTerm))

    if criterias:
        # Advanced search: base filter ANDed with the OR of all criteria
        advanced_query = catalog.makeAdvancedQuery(contentFilter)
        aq_or = Or()
        for criteria in criterias:
            aq_or.addSubquery(criteria)
        advanced_query &= aq_or
        brains = catalog.evalAdvancedQuery(advanced_query)
    else:
        brains = catalog(contentFilter)

    # Manual post-filtering for search fields that lack a catalog index:
    # read the value from the brain, or wake the object as a last resort.
    if brains and searchTerm and fields_wo_index:
        _brains = []
        for brain in brains:
            for field_name in fields_wo_index:
                value = getattr(brain, field_name, None)
                if not value:
                    instance = brain.getObject()
                    schema = instance.Schema()
                    if field_name in schema:
                        value = schema[field_name].get(instance)
                if callable(value):
                    value = value()
                if value and value.lower().find(searchTerm) > -1:
                    _brains.append(brain)
                    break
        brains = _brains

    # Then just base_query alone ("show all if no match")
    if not brains and force_all.lower() == 'true':
        if search_query:
            brains = catalog(base_query)
            if brains and searchTerm:
                _brains = [p for p in brains
                           if p.Title.lower().find(searchTerm) > -1]
                if _brains:
                    brains = _brains
    return brains
def _process_request(self):
    """Read listing parameters from the request and update view state.

    Use this function from a template that is using bika_listing_table
    in such a way that the table_only request var will be used to
    in-place-update the table. Populates self.contentFilter,
    self.sort_order, self.pagesize, self.pagenumber, the self.And /
    self.Or index filters and the per-column 'toggle' flags.
    """
    form_id = self.form_id

    # If table_only specifies another form_id, then we abort.
    # this way, a single table among many can request a redraw,
    # and only it's content will be rendered.
    if form_id not in self.request.get('table_only', form_id):
        return ''

    # review_state_selector (POST value OR cookie value OR 'all')
    request_key = form_id + '_review_state'
    review_state_name = self.request.get(request_key, None) or 'all'
    states = [r for r in self.review_states
              if r['id'] == review_state_name]
    review_state = states and states[0] or self.review_states[0]
    self.review_state = review_state['id']
    self.request[request_key] = review_state['id']
    self.request.response.setCookie(request_key, review_state['id'],
                                    path=self.view_url)
    if 'contentFilter' in review_state:
        for k, v in review_state['contentFilter'].items():
            self.contentFilter[k] = v
    else:
        if review_state['id'] != 'all':
            self.contentFilter['review_state'] = review_state['id']

    # sort on
    sort_on = self.request.get(form_id + '_sort_on', '')
    # manual_sort_on: only sort the current batch of items
    # this is a compromise for sorting without column indexes
    self.manual_sort_on = None
    if sort_on \
            and sort_on in self.columns.keys() \
            and self.columns[sort_on].get('index', None):
        idx = self.columns[sort_on].get('index', sort_on)
        self.contentFilter['sort_on'] = idx
    else:
        if sort_on:
            self.manual_sort_on = sort_on
            if 'sort_on' in self.contentFilter:
                del self.contentFilter['sort_on']

    # sort order
    self.sort_order = self.request.get(form_id + '_sort_order', '')
    if self.sort_order:
        self.contentFilter['sort_order'] = self.sort_order
    else:
        if 'sort_order' not in self.contentFilter:
            self.sort_order = 'ascending'
            self.contentFilter['sort_order'] = 'ascending'
            self.request.set(form_id + '_sort_order', 'ascending')
        else:
            self.sort_order = self.contentFilter['sort_order']
    if self.manual_sort_on:
        del self.contentFilter['sort_order']

    # pagesize: be defensive about request-supplied garbage (multiple
    # form values, non-numeric strings) instead of letting int() raise.
    pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
    if isinstance(pagesize, (list, tuple)):
        pagesize = pagesize[0]
    try:
        pagesize = int(pagesize)
    except (ValueError, TypeError):
        pagesize = self.pagesize
    self.pagesize = pagesize
    # Plone's batching wants this variable:
    self.request.set('pagesize', self.pagesize)

    # pagenumber: same defensive int() handling as pagesize
    try:
        self.pagenumber = int(
            self.request.get(form_id + '_pagenumber', self.pagenumber))
    except (ValueError, TypeError):
        pass
    # Plone's batching wants this variable:
    self.request.set('pagenumber', self.pagenumber)

    # index filters.
    self.And = []
    self.Or = []
    for k, v in self.columns.items():
        if 'index' not in v \
                or v['index'] == 'review_state' \
                or v['index'] in self.filter_indexes:
            continue
        self.filter_indexes.append(v['index'])

    # any request variable named ${form_id}_{index_name}
    # will pass it's value to that index in self.contentFilter.
    # all conditions using ${form_id}_{index_name} are searched with AND
    for index in self.filter_indexes:
        request_key = "%s_%s" % (form_id, index)
        if request_key in self.request:
            self.And.append(MatchRegexp(index, self.request[request_key]))

    # if there's a ${form_id}_filter in request, then all indexes
    # are are searched for it's value.
    # ${form_id}_filter is searched with OR agains all indexes
    request_key = "%s_filter" % form_id
    if request_key in self.request and self.request[request_key] != '':
        for index in self.filter_indexes:
            self.Or.append(MatchRegexp(index, self.request[request_key]))
        self.Or.append(
            MatchRegexp('review_state', self.request[request_key]))

    # get toggle_cols cookie value
    # and modify self.columns[]['toggle'] to match.
    toggles = {}
    try:
        # request OR cookie OR default
        toggles = json.loads(
            self.request.get('toggle_cookie',
                             self.request.get("toggle_cols", "{}")))
    except (ValueError, TypeError):
        # best-effort: fall back to the defaults below on malformed JSON
        pass
    if not toggles:
        toggles = {}
    cookie_key = "%s/%s" % (self.view_url, form_id)
    default_toggle_cols = [
        col for col in self.columns.keys()
        if col in review_state['columns']
        and ('toggle' not in self.columns[col]
             or self.columns[col]['toggle'] == True)
    ]
    toggle_cols = toggles.get(cookie_key, default_toggle_cols)
    for col in self.columns.keys():
        self.columns[col]['toggle'] = col in toggle_cols