def searchForMembers(self, REQUEST=None, **kw):
    """Search for users (not groups, not groups-as-users) of a site.

    Search terms come from the REQUEST mapping when given, otherwise from
    keyword arguments.  The 'name' term is shlex-split into words (quoted
    phrases stay together); each word is globbed against fullname/email and
    matched exactly against getUserName.

    Returns catalog brains sorted by (surname, firstname).
    """
    # Renamed from 'dict' to avoid shadowing the builtin.
    params = REQUEST if REQUEST else kw
    name = params.get('name', '')
    # Split up search terms but leave quoted ones together.
    try:
        names = shlex.split(name)
    except ValueError:
        # Unbalanced quoting: first try escaping single quotes; as a last
        # resort force-close a dangling double quote.
        try:
            names = shlex.split(name.replace("'", "\\'"))
        except ValueError:
            names = shlex.split(name.replace("'", "\\'") + '"')
    # Short circuit: if all that was asked for was '*', just return everyone.
    if names == ['*']:
        query = And()
    else:
        # Loop variable renamed from 'name' to avoid shadowing the outer
        # 'name' string.
        queries = []
        for term in names:
            queries.extend([
                MatchGlob('fullname', term),
                MatchGlob('email', term),
                Eq('getUserName', term)
            ])
        query = Or(*queries)
    zLOG.LOG('MembershipTool', zLOG.BLATHER, 'Querying: %s' % query)
    catalog = getToolByName(self, 'member_catalog')
    return catalog.evalAdvancedQuery(query, ('surname', 'firstname'))
def filteredDevices(context, args, *types):
    """Yield device objects under /zport/dmd matching the given filters.

    :param context: object used to obtain the model catalog tool
    :param args: mapping that may contain 'deviceFilter' (substring glob on
        name/id), 'deviceClass' or 'organizer' (path restriction), and
        'extraquery' (additional query terms)
    :param types: optional object types to search for; defaults to Device
    """
    path = '/zport/dmd'
    deviceFilter = args.get('deviceFilter', '') or ''
    deviceClass = args.get('deviceClass', '') or ''
    extraquery = args.get('extraquery', '')
    # Renamed from 'filter' to avoid shadowing the builtin.
    criteria = []
    if deviceFilter:
        criteria.append(
            MatchGlob('name', '*%s*' % deviceFilter) |
            MatchGlob('id', '*%s*' % deviceFilter))
    # deviceClass takes precedence over the organizer argument.
    if deviceClass:
        organizer = (''.join([path, deviceClass]), )
    else:
        organizer = (''.join(
            [path, args.get('organizer', '/Devices') or '/Devices']), )
    if not types:
        types = 'Products.ZenModel.Device.Device'
    if extraquery:
        # NOTE(review): extend() iterates its argument, so callers must pass
        # a sequence of query objects here, not a single query -- a bare
        # string would be iterated character by character. TODO confirm
        # callers' contract.
        criteria.extend(extraquery)
    query = And(*criteria) if criteria else None
    results = IModelCatalogTool(context).search(types,
                                                paths=organizer,
                                                query=query)
    for brain in results:
        try:
            yield brain.getObject()
        except Exception:
            # Best-effort: skip brains whose object can no longer be loaded.
            log.warn("Unable to unbrain at path %s", brain.getPath())
def testSearches(self):
    """Exercise model catalog searches: dirty commits, counting, paging."""
    total = 100
    prefix = "testSearches_DEVICE_CLASS_"
    created = {}
    for idx in xrange(total):
        organizer = self.dmd.Devices.createOrganizer(
            "{}{:02}".format(prefix, idx))
        created[organizer.idx_uid()] = organizer

    glob_query = MatchGlob(UID, "/zport/dmd/Devices/{}*".format(prefix))

    # Uncommitted changes are invisible unless commit_dirty is set.
    found = self.model_catalog.search(query=glob_query, commit_dirty=False)
    self.assertTrue(found.total == 0)
    found = self.model_catalog.search(query=glob_query, commit_dirty=True)
    self.assertTrue(found.total == total)

    # A count-style search (limit=0) still reports the full match total.
    found = self.model_catalog.search(query=glob_query, limit=0)
    self.assertTrue(found.total == total)

    # Paged searches return exactly the expected window of uids.
    page_size = 18
    for offset in [0, 12, 45, 70]:
        expected = {
            "/zport/dmd/Devices/{}{:02}".format(prefix, idx)
            for idx in xrange(offset, offset + page_size)
        }
        found = self.model_catalog.search(query=glob_query,
                                          start=offset,
                                          limit=page_size)
        self.assertTrue(found.total == total)
        returned = {getattr(brain, UID) for brain in found.results}
        self.assertEquals(len(returned), page_size)
        self.assertEquals(len(returned), len(expected))
        self.assertTrue(len(expected - returned) == 0)
        self.assertTrue(len(returned - expected) == 0)
def _typecatComponentBrains(self, uid=None, types=(), meta_type=(),
                            start=0, limit=None, sort='name', dir='ASC',
                            name=None, keys=()):
    """Query the per-meta_type component catalog for components under uid.

    Returns (brains, total) where brains is the requested page of results,
    or (None, 0) when no typed catalog/field spec exists or the sort key
    is not indexed (callers then fall back to the slow path).
    """
    parent = self._getObject(uid)
    spec = get_component_field_spec(meta_type)
    if spec is None:
        return None, 0
    typecat = spec.get_catalog(parent, meta_type)
    sortspec = ()
    if sort:
        if sort not in typecat._catalog.indexes:
            # Fall back to slow queries and sorting
            return None, 0
        sortspec = ((sort, dir), )
    terms = [Generic('path', uid)]
    if name:
        # Substring match of `name` against every field in the spec.
        name_terms = (MatchGlob(field, '*%s*' % name)
                      for field in spec.fields)
        terms.append(Or(*name_terms))
    brains = typecat.evalAdvancedQuery(And(*terms), sortspec)
    total = len(brains)
    # Apply paging after the query; limit=None means "to the end".
    end = None if limit is None else start + limit
    return brains[start:end], total
def get_uids(index_client, root="", types=()):
    """Yield the uid of every committed (tx_state == 0) document.

    Optionally restrict to documents at or under `root`, and/or to
    documents implementing any of the given dotted `types`.  Results are
    fetched from the index in batches of MODEL_INDEX_BATCH_SIZE, ordered
    by uid.
    """
    clauses = [Eq("tx_state", 0)]
    if root:
        root = root.rstrip('/')
        # Match the root object itself plus everything below it.
        clauses.append(
            Or(Eq("uid", "{}".format(root)),
               MatchGlob("uid", "{}/*".format(root))))
    if not isinstance(types, (tuple, list)):
        types = (types, )
    if types:
        clauses.append(In("objectImplements",
                          [dottedname(t) for t in types]))
    offset = 0
    while True:
        batch = index_client.search(
            SearchParams(query=And(*clauses),
                         start=offset,
                         limit=MODEL_INDEX_BATCH_SIZE,
                         order_by="uid",
                         fields=["uid"]))
        offset += MODEL_INDEX_BATCH_SIZE
        for hit in batch.results:
            yield hit.uid
        if offset >= batch.total_count:
            break
def _findDevice(self, devicename, useTitle=True, commit_dirty=False):
    """
    Returns all devices whose ip/id/title match devicename.
    ip/id matches are at the front of the list.

    @rtype: list of brains
    """
    # Glob against id and indexed IP address; optionally also the name.
    match_terms = [
        MatchGlob('id', devicename),
        MatchGlob('text_ipAddress', devicename),
    ]
    if useTitle:
        match_terms.append(MatchGlob('name', devicename))
    device_only = Eq("objectImplements",
                     "Products.ZenModel.Device.Device")
    query = And(device_only, Or(*match_terms))
    wanted_fields = ["name", "id", "text_ipAddress"]
    tool = IModelCatalogTool(self.dmd.Devices)
    found = tool.search(query=query,
                        fields=wanted_fields,
                        commit_dirty=commit_dirty)
    return list(found.results)
def _findDevice(self, devicename, useTitle=True):
    """
    Returns all devices whose ip/id/title match devicename.
    ip/id matches are at the front of the list.

    @rtype: list of brains
    """
    byIdOrIp = Or(MatchGlob('id', devicename),
                  Eq('getDeviceIp', devicename))
    if not useTitle:
        return self._getCatalog().evalAdvancedQuery(byIdOrIp)
    # Also match on title; rank id/ip hits (weight 16) above title
    # hits (weight 8) so they sort to the front.
    byTitle = MatchGlob('titleOrId', devicename)
    ranking = RankByQueries_Max((byIdOrIp, 16), (byTitle, 8))
    return self._getCatalog().evalAdvancedQuery(
        Or(byIdOrIp, byTitle), (ranking, ))
def devices(self, name):
    """return managed devices from ip, name or mac address
    taken from ZentinelPortal.py"""
    zcatalog = self.dmd.Devices.deviceSearch
    # Normalize to a trailing-'*' glob on the id.
    pattern = name.rstrip('*') + '*'
    id_glob = MatchGlob('id', pattern)
    device_query = Or(id_glob, Eq('getDeviceIp', name))
    brains = zcatalog.evalAdvancedQuery(device_query)
    # Also pick up matching IP objects from the networks catalog.
    brains += self.dmd.Networks.ipSearch.evalAdvancedQuery(id_glob)
    return [b.getObject() for b in brains]
def __call__(self, query='', dataRoot='devices'):
    """
    @param query: A glob by which to filter device names
    @type query: str
    @return: A JSON representation of a list of ids
    @rtype: "['id1', 'id2', 'id3']"
    """
    if dataRoot != 'devices':
        # ValueError is a builtin; no need to import the (Python 2-only)
        # 'exceptions' module to raise it.
        raise ValueError("dataRoot should only be 'devices'")
    query_scope = self.context.dmd.Devices
    # Normalize to a trailing-'*' glob on the indexed name.
    query = MatchGlob('name', query.rstrip('*') + '*')
    # Scope the search to the current organizer when called on one.
    if isinstance(self.context, DeviceOrganizer):
        query_scope = self.context
    catalog = IModelCatalogTool(query_scope).devices
    brains = catalog.search(query=query, fields=['name'])
    return sorted((b.name for b in brains), key=lambda x: x.lower())
def __call__(self, query='', dataRoot='devices'):
    """
    @param query: A glob by which to filter device names
    @type query: str
    @return: A JSON representation of a list of ids
    @rtype: "['id1', 'id2', 'id3']"
    """
    if dataRoot != 'devices':
        # ValueError is a builtin; no need to import the (Python 2-only)
        # 'exceptions' module to raise it.
        raise ValueError("dataRoot should only be 'devices'")
    catalog = getToolByName(self.context.dmd.Devices,
                            self.context.dmd.Devices.default_catalog)
    # Normalize to a trailing-'*' glob on titleOrId.
    query = MatchGlob('titleOrId', query.rstrip('*') + '*')
    # Restrict to the current organizer's subtree when called on one.
    if isinstance(self.context, DeviceOrganizer):
        query = query & Eq('path', "/".join(self.context.getPhysicalPath()))
    brains = catalog.evalAdvancedQuery(query)
    # TODO: Add titleOrId to the catalog's metadata.
    return sorted((b.getObject().titleOrId() for b in brains),
                  key=lambda x: x.lower())
def searchDevices(self, queryString='', REQUEST=None):
    """Returns the concatenation of a device name, ip and mac
    search on the list of devices.
    """
    # TODO: Remove. Not used anymore in Zenoss code --Ian
    zcatalog = self.dmd.Devices.deviceSearch
    pattern = queryString.rstrip('*') + '*'
    idGlob = MatchGlob('id', pattern)
    titleGlob = MatchGlob('titleOrId', pattern)
    # id/title glob OR exact device-IP match.
    query = Or(Or(idGlob, titleGlob), Eq('getDeviceIp', queryString))
    additionalQuery = self._additionalQuery()
    if additionalQuery:
        query = And(query, additionalQuery)
    brains = zcatalog.evalAdvancedQuery(query)
    # With exactly one hit from the UI, jump straight to that device.
    if REQUEST and len(brains) == 1:
        raise Redirect(urllib.quote(brains[0].getPrimaryId))
    if additionalQuery:
        idGlob = And(idGlob, additionalQuery)
    # Append matching IP objects from the networks catalog.
    brains += self.dmd.Networks.ipSearch.evalAdvancedQuery(idGlob)
    return [b.getObject() for b in brains]
def getObjectBrains(self, uid=None, start=0, limit=50, sort='name',
                    dir='ASC', params=None, hashcheck=None, types=(),
                    fields=None):
    """Search the model catalog under uid, translating UI filter params
    into catalog query terms.

    Indexed params (ipAddress, productionState, priority, and the
    organizer facets) become query clauses; anything else is passed
    through as a glob filter.  Returns the catalog search result, or []
    when an organizer filter matches nothing.
    """
    # 'fields' default changed from a mutable [] to a None sentinel to
    # avoid the shared-mutable-default pitfall; behavior is unchanged.
    if fields is None:
        fields = []
    cat = IModelCatalogTool(self._getObject(uid))
    reverse = bool(dir == 'DESC')
    qs = []
    query = None
    globFilters = {}
    params = params if params else {}
    for key, value in params.iteritems():
        if key == 'ipAddress':
            qs.append(MatchGlob('text_ipAddress', '{}*'.format(value)))
        elif key == 'productionState':
            qs.append(
                Or(*[Eq('productionState', str(state))
                     for state in value]))
        # ZEN-30949 - stringify values from the 'priority' list if it's
        # passed in for query criteria
        elif key == 'priority':
            qs.append(
                Or(*[Eq('priority', str(priority))
                     for priority in value]))
        # ZEN-10057 - move filtering on indexed groups/systems/location
        # from post-filter to query
        elif key in organizersToClass:
            organizerQuery = self.findMatchingOrganizers(
                organizersToClass[key], organizersToPath[key], value)
            if not organizerQuery:
                # Organizer filter matched nothing: no results possible.
                return []
            qs.append(organizerQuery)
        else:
            globFilters[key] = value
    if qs:
        query = And(*qs)
    return cat.search(types,
                      start=start,
                      limit=limit,
                      orderby=sort,
                      reverse=reverse,
                      query=query,
                      globFilters=globFilters,
                      hashcheck=hashcheck,
                      fields=fields)
def get_rrd_templates(self, *args, **kwargs):
    """Search the model catalog for RRDTemplate objects that live under
    /zport/dmd/Devices, forwarding any extra search arguments."""
    is_template = Eq("objectImplements",
                     "Products.ZenModel.RRDTemplate.RRDTemplate")
    under_devices = MatchGlob('uid', "/zport/dmd/Devices*")
    kwargs["query"] = And(is_template, under_devices)
    return self.model_catalog.search(*args, **kwargs)
def testDataManager(self):
    """End-to-end check of the model-catalog transaction data manager.

    Walks a new organizer through the tx_state life cycle: buffered
    (pending) -> temp-indexed after a dirty search -> committed /
    aborted, asserting at each step what searches can see and what the
    underlying Solr index holds.  Strictly order-dependent; do not
    reorder statements.
    """
    # before any changes are made, tx_state is None
    self.assertIsNone(self._get_transaction_state())
    device_class_1 = "device_class_1"
    device_class_2 = "device_class_2"
    device_class_3 = "device_class_3"
    device_class_4 = "device_class_4"
    # create an organizer
    dc_1 = self.dmd.Devices.createOrganizer(device_class_1)
    tx_state = self._get_transaction_state()
    dc_1_uid = dc_1.idx_uid()
    # Some tx_state checks: the update is pending, nothing indexed yet.
    self.assertIsNotNone(tx_state)
    self.assertTrue(len(tx_state.pending_updates) > 0)
    self.assertTrue(len(tx_state.indexed_updates) == 0)
    self.assertTrue(len(tx_state.temp_indexed_uids) == 0)
    self.assertTrue(len(tx_state.temp_deleted_uids) == 0)
    # The new organizer index update should have been buffered in tx_state
    self._check_tx_state(pending=dc_1_uid)
    # A search with commit_dirty=False should not find the new device organizer
    search_results = self.model_catalog.search(query=Eq(UID, dc_1_uid),
                                               commit_dirty=False)
    self.assertEquals(search_results.total, 0)
    # A search with commit_dirty=True must find the new device organizer
    search_results = self.model_catalog.search(query=Eq(UID, dc_1_uid),
                                               commit_dirty=True)
    # model catalog should return the dirty doc
    self.assertEquals(search_results.total, 1)
    self._validate_temp_indexed_results(search_results,
                                        expected_object_uids=[dc_1_uid])
    # the tx_state object should have been updated appropriately:
    # the pending update moved to temp-indexed.
    self._check_tx_state(temp_indexed=dc_1_uid)
    self.assertTrue(len(tx_state.pending_updates) == 0)
    # create another organizer
    dc_2 = self.dmd.Devices.createOrganizer(device_class_2)
    dc_2_uid = dc_2.idx_uid()
    # check tx_state has been updated accordingly
    self._check_tx_state(pending=dc_2_uid, temp_indexed=dc_1_uid)
    # search for both device classes with commit_dirty=False,
    # it should only return dc_1_uid
    query = MatchGlob(UID, "/zport/dmd/Devices/device_class*")
    search_results = self.model_catalog.search(query=query,
                                               commit_dirty=False)
    self._validate_temp_indexed_results(search_results,
                                        expected_object_uids=[dc_1_uid])
    # tx_state should not have changed
    self._check_tx_state(pending=dc_2_uid, temp_indexed=dc_1_uid)
    # now with commit_dirty=True
    search_results = self.model_catalog.search(query=query,
                                               commit_dirty=True)
    self._check_tx_state(temp_indexed=[dc_1_uid, dc_2_uid])
    # it should return 2 device classes
    self.assertEquals(search_results.total, 2)
    self._validate_temp_indexed_results(
        search_results, expected_object_uids=[dc_1_uid, dc_2_uid])
    # Lets delete device_class_1
    self.dmd.Devices._delObject(device_class_1)
    self._check_tx_state(pending=[dc_1_uid])
    # a search with commit = True should not return device_class_1 anymore
    search_results = self.model_catalog.search(query=query,
                                               commit_dirty=True)
    self._validate_temp_indexed_results(search_results,
                                        expected_object_uids=[dc_2_uid])
    self._check_tx_state(temp_deleted=[dc_1_uid])
    # however, we should have two temp docs matching
    # "/zport/dmd/Devices/device_class*" (deletes stay in Solr until commit)
    mi_results = self.model_index.search(SearchParams(query))
    self.assertTrue(mi_results.total_count == 2)
    # make sure a count type of query works (search with limit=0)
    search_results = self.model_catalog.search(query=query, limit=0,
                                               commit_dirty=True)
    self.assertTrue(search_results.total == 1)
    # some more tx_state checks before moving on to the next thing
    tx_state = self._get_transaction_state()
    self.assertTrue(len(tx_state.pending_updates) == 0)
    self.assertTrue(len(tx_state.indexed_updates) == 2)
    self.assertTrue(len(tx_state.temp_indexed_uids) == 1)
    self.assertTrue(len(tx_state.temp_deleted_uids) == 1)
    # Simulate transaction is committed and do checks
    updated_uids = set(
        tx_state.pending_updates.keys()) | tx_state.temp_indexed_uids
    try:
        tid = self.data_manager._get_tid()
        # before commit we should have 2 docs with tx_state = tid
        mi_results = self.model_index.search(
            SearchParams(Eq(TX_STATE_FIELD, tid)))
        self.assertTrue(mi_results.total_count == 2)
        # Lets do the commit
        self._simulate_tx_commit()
        self.assertIsNone(self._get_transaction_state())
        # Check we only have one doc matching
        # "/zport/dmd/Devices/device_class*"
        search_results = self.model_catalog.search(query=query,
                                                   commit_dirty=False)
        self.assertEquals(search_results.total, 1)
        # Check the result's tx_state field has been set to zero
        brain = search_results.results.next()
        self.assertEquals(brain.tx_state, 0)
        # No documents should remain with tx_state == tid
        mi_results = self.model_index.search(
            SearchParams(Eq(TX_STATE_FIELD, tid)))
        self.assertEquals(mi_results.total_count, 0)
    finally:
        # clean up created docs in solr
        query = In(UID, updated_uids)
        self.model_index.unindex_search(SearchParams(query))
    # create another organizer in a new transaction
    dc_3 = self.dmd.Devices.createOrganizer(device_class_3)
    dc_3_uid = dc_3.idx_uid()
    self._check_tx_state(pending=dc_3_uid)
    tx_state = self._get_transaction_state()
    self.assertTrue(len(tx_state.pending_updates) == 1)
    self.assertTrue(len(tx_state.indexed_updates) == 0)
    self.assertTrue(len(tx_state.temp_indexed_uids) == 0)
    self.assertTrue(len(tx_state.temp_deleted_uids) == 0)
    # Manual mid-transaction commit: pending updates become temp-indexed.
    self.data_manager.do_mid_transaction_commit()
    self._check_tx_state(temp_indexed=dc_3_uid)
    self.assertTrue(len(tx_state.pending_updates) == 0)
    self.assertTrue(len(tx_state.indexed_updates) == 1)
    self.assertTrue(len(tx_state.temp_indexed_uids) == 1)
    self.assertTrue(len(tx_state.temp_deleted_uids) == 0)
    query = MatchGlob(UID, "/zport/dmd/Devices/device_class*")
    search_results = self.model_catalog.search(query=query,
                                               commit_dirty=False)
    self._validate_temp_indexed_results(search_results,
                                        expected_object_uids=[dc_3_uid])
    # Simulate transaction is aborted and check tx state has been reset
    self.data_manager.abort(transaction.get())
    # No docs should match the device class uid
    search_results = self.model_catalog.search(query=Eq(UID, dc_3_uid),
                                               commit_dirty=False)
    self.assertTrue(search_results.total == 0)
    # No documents should remain with tx_state == tid
    tid = self.data_manager._get_tid()
    mi_results = self.model_index.search(
        SearchParams(Eq(TX_STATE_FIELD, tid)))
    self.assertEquals(mi_results.total_count, 0)
    self.assertIsNone(self._get_transaction_state())
    # delete a doc that exists before current tx, do a search with
    # commit dirty and abort
    dc_4 = self.dmd.Devices.createOrganizer(device_class_4)
    dc_4_uid = dc_4.idx_uid()
    query = Eq(UID, dc_4_uid)
    try:
        self._simulate_tx_commit(
        )  # commit to get the device_class_4 doc in solr
        # check the doc exists in solr
        search_results = self.model_catalog.search(query=query)
        self.assertTrue(search_results.total == 1)
        # delete the object
        self.dmd.Devices._delObject(device_class_4)
        # a model catalog search with commit_dirty=True should no return
        # the deleted doc
        search_results = self.model_catalog.search(query=query,
                                                   commit_dirty=True)
        self.assertTrue(search_results.total == 0)
        # however the doc is still in solr (delete is only staged)
        mi_results = self.model_index.search(SearchParams(query))
        self.assertTrue(mi_results.total_count == 1)
        # Abort tx
        self.data_manager.abort(transaction.get())
        # The doc should have been left intact in solr
        search_results = self.model_catalog.search(query=query)
        self.assertTrue(search_results.total == 1)
    finally:
        # clean up created docs in solr
        self.model_index.unindex_search(SearchParams(query))
def listMatchGlob(op, index, list):
    """Combine substring-glob matches on `index` -- one per item in
    `list` -- with the given query combinator `op` (e.g. Or/And).

    Note: the parameter name 'list' shadows the builtin but is kept for
    interface compatibility.
    """
    globs = [MatchGlob(index, '*%s*' % item) for item in list]
    return op(*globs)
def _parse_basic_query(attr, value):
    """Build a query term for attr: a glob match when value is a string
    containing '*', otherwise an exact equality match."""
    is_glob = isinstance(value, str) and '*' in value
    return MatchGlob(attr, value) if is_glob else Eq(attr, value)
def find_claimable_device(self, device_class=None):
    '''
    Find a possible Linux device for the host: Search by id, title, and
    management IP, against id, hostnames, and IPs

    Candidates are devices in (or under) device_class that are not yet
    linked to a host component; matches are preferred in order:
    exact id, then exact name, then any remaining candidate.
    Returns a device or None.
    '''
    if device_class is None:
        device_class = self.proxy_deviceclass()
    suggested_name = self.suggested_device_name()
    # Candidate search terms; drop the ones we don't have.
    search_values = [
        x for x in self.id, suggested_name, self.hostname, self.host_ip
        if x is not None
    ]
    # Restrict to the device class subtree, matching any search value
    # against id, titleOrId or device IP.
    brains = device_class.deviceSearch.evalAdvancedQuery(
        And(
            MatchGlob('getDeviceClassPath',
                      device_class.getOrganizerName() + "*"),
            Or(In('id', search_values), In('titleOrId', search_values),
               In('getDeviceIp', search_values))))
    possible_devices = []
    for brain in brains:
        try:
            device = brain.getObject()
            # Only devices not already claimed by a host component are
            # eligible; when the device supports IP realms, the realms
            # must also be identical (note: identity check, not ==).
            if device.openstack_hostComponent() is None:
                if hasattr(device, 'getIpRealm'):
                    if self.getIpRealm() is device.getIpRealm():
                        possible_devices.append(device)
                else:
                    possible_devices.append(device)
            else:
                LOG.info(
                    "%s component %s unable to claim device %s, because it is already linked to %s",
                    self.meta_type, self.name(), device.id,
                    device.openstack_hostComponent().id)
        except Exception:
            # NOTE(review): broad best-effort skip of brains that fail to
            # load or inspect; errors are silently ignored here.
            pass
    # 1. First look by matching id against my id/suggested_name/hostname
    for device in possible_devices:
        if device.id == self.id:
            return device
    for device in possible_devices:
        if device.id == suggested_name or device.id == self.hostname:
            return device
    # 2. Next find by matching name against my id/suggested_name/hostname
    for device in possible_devices:
        if device.name() == self.id:
            return device
    for device in possible_devices:
        if device.name() == suggested_name or device.name(
        ) == self.hostname:
            return device
    # Otherwise, return the first device, if one was found
    if possible_devices:
        return possible_devices[0]
    if device_class == self.proxy_deviceclass():
        # check for other devices that we would have claimed, if they
        # had been in the right device class
        device = self.find_claimable_device(device_class=self.dmd.Devices)
        if device:
            # Informational only: the out-of-class device is NOT claimed
            # or returned; the operator must move it first.
            LOG.info(
                "No claimable device found for %s, but %s was found "
                "in another device class. Moving it to %s will make "
                "it eligible.", self.id, device.id,
                self.proxy_deviceclass().getOrganizerName())
    # No claimable device was found.
    return None