def load(self, pack, app):
    """Load this pack's XML object files into the application.

    Each imported object gets its relations built and is (re)linked to
    *pack* via its 'pack' relation.  The importer runs with indexing and
    committing disabled; indexing is batched through
    pausedAndOptimizedIndexing so each object is indexed at most once.
    """
    from Products.ZenRelations.ImportRM import ImportRM

    class AddToPack(ImportRM):
        # ImportRM subclass that attaches every imported object to *pack*
        # (closed over from the enclosing call) as it is parsed.
        def endElement(self, name):
            if name == 'object':
                obj = self.objstack[-1]
                log.debug('Now adding %s', obj.getPrimaryUrlPath())
                try:
                    obj.buildRelations()
                    # Re-point the 'pack' relation at this pack.
                    obj.removeRelation('pack')
                    obj.addRelation('pack', pack)
                except Exception:
                    # Best effort: log and continue with remaining objects.
                    log.exception("Error adding pack to %s",
                                  obj.getPrimaryUrlPath())
            ImportRM.endElement(self, name)

    importer = AddToPack(noopts=True, app=app)
    # Defer indexing and committing; indexing is handled by the context
    # manager below, committing by the caller.
    importer.options.noindex = True
    importer.options.noCommit = True
    with pausedAndOptimizedIndexing():
        for f in self.objectFiles(pack):
            log.info("Loading %s", f)
            importer.loadObjectFromXML(xmlfile=f)
def migrate_result(self, result):
    """Return True if result needed to be migrated.

    Delete instance properties that shadow class properties, and
    reindex the object if its resulting meta_type no longer matches
    its indexed meta_type.
    """
    try:
        obj = result.getObject()
    except Exception:
        # Stale catalog entry; nothing to migrate.
        return False
    migrated = False
    # Remove instance attributes that shadow the class-level values.
    # 'del' is a statement, not a function.
    try:
        del obj.meta_type
    except Exception:
        pass
    else:
        migrated = True
    try:
        del obj.portal_type
    except Exception:
        pass
    else:
        migrated = True
    if result.meta_type != obj.meta_type:
        # BUG FIX: the original entered the context manager with
        # pausedAndOptimizedIndexing(obj.index_object()), eagerly calling
        # index_object() and passing its return value as an argument.
        # The manager must be entered with no arguments (as it is used
        # elsewhere in this file); the reindexing work belongs inside.
        with pausedAndOptimizedIndexing():
            obj.index_object()
            notify(IndexingEvent(obj))
        migrated = True
    return migrated
def migrate_result(self, result):
    """Return True if result needed to be migrated.

    Delete instance properties that shadow class properties, and
    reindex the object if its resulting meta_type no longer matches
    its indexed meta_type.
    """
    try:
        obj = result.getObject()
    except Exception:
        # Catalog brain could not be resolved; nothing to migrate.
        return False
    migrated = False
    # Drop instance attributes that shadow the class attributes.
    # 'del' is a statement, not a function.
    try:
        del obj.meta_type
    except Exception:
        pass
    else:
        migrated = True
    try:
        del obj.portal_type
    except Exception:
        pass
    else:
        migrated = True
    if result.meta_type != obj.meta_type:
        # BUG FIX: the original called
        # pausedAndOptimizedIndexing(obj.index_object()), which runs
        # index_object() eagerly and passes its return value to the
        # context manager.  Enter the manager with no arguments and do
        # the reindexing inside the block, matching its other uses.
        with pausedAndOptimizedIndexing():
            obj.index_object()
            notify(IndexingEvent(obj))
        migrated = True
    return migrated
def _applyDataMap(self, device, datamap):
    """Apply a datamap to a device.

    Resolves the target object (component by compname, or the device
    itself), applies the relationship/object map with indexing paused,
    and annotates or aborts the transaction accordingly.

    Returns True if any object was changed, False otherwise.
    """
    self.num_obj_changed = 0
    log.debug("Started _applyDataMap for device %s", device.getId())
    logname = ""
    # This can cause breakage in unit testing when the device is persisted.
    if not hasattr(device.dmd, 'zport'):
        transaction.abort()
    # There's the potential for a device to change device class during
    # modeling. Due to this method being run within a retrying @transact,
    # this will result in device losing its deviceClass relationship.
    if not device.deviceClass():
        new_device = device.dmd.Devices.findDeviceByIdExact(device.id)
        if new_device:
            log.debug("%s changed device class to %s during modeling",
                      new_device.titleOrId(),
                      new_device.getDeviceClassName())
            device = new_device
        else:
            log.error("%s lost its device class during modeling",
                      device.titleOrId())
            return False
    if hasattr(datamap, "compname"):
        if datamap.compname:
            try:
                tobj = device.getObjByPath(datamap.compname)
            except NotFound:
                # FIX: use lazy %-style logging args instead of eager
                # string interpolation.
                log.warn("Unable to find compname '%s'", datamap.compname)
                return False
        else:
            # Empty compname: the map applies to the device itself.
            tobj = device
    # NOTE(review): tobj is unbound if the datamap lacks a 'compname'
    # attribute entirely — presumably all datamaps carry one; confirm.
    # BUG FIX: 'changed' was previously unbound when the datamap had
    # neither 'relname' nor 'modname' (the warning branch below), making
    # 'if not changed' raise NameError.
    changed = False
    # Delay indexing until the map has been fully processed
    # so we index the minimum amount
    with pausedAndOptimizedIndexing():
        if hasattr(datamap, "relname"):
            logname = datamap.relname
            changed = self._updateRelationship(tobj, datamap)
        elif hasattr(datamap, 'modname'):
            logname = datamap.compname
            changed = self._updateObject(tobj, datamap)
        else:
            log.warn("plugin returned unknown map skipping")
    if not changed:
        # Nothing modified: discard any incidental transaction state.
        transaction.abort()
    else:
        device.setLastChange()
        trans = transaction.get()
        trans.setUser("datacoll")
        trans.note("data applied from automated collection")
    log.debug("_applyDataMap for Device %s will modify %d objects for %s",
              device.getId(), self.num_obj_changed, logname)
    return changed
def _applyDataMap(self, device, datamap): """Apply a datamap to a device. """ # This can cause breakage in unit testing when the device is persisted. if not hasattr(device.dmd, 'zport'): transaction.abort() # There's the potential for a device to change device class during # modeling. Due to this method being run within a retrying @transact, # this will result in device losing its deviceClass relationship. if not device.deviceClass(): new_device = device.dmd.Devices.findDeviceByIdExact(device.id) if new_device: log.debug("%s changed device class to %s during modeling", new_device.titleOrId(), new_device.getDeviceClassName()) device = new_device else: log.error("%s lost its device class during modeling", device.titleOrId()) return False changed = False if hasattr(datamap, "compname"): if datamap.compname: try: tobj = device.getObjByPath(datamap.compname) except NotFound: log.warn("Unable to find compname '%s'", datamap.compname) return False else: tobj = device # Delay indexing until the map has been fully processed # so we index the minimum amount with pausedAndOptimizedIndexing(): if hasattr(datamap, "relname"): changed = self._updateRelationship(tobj, datamap) elif hasattr(datamap, 'modname'): changed = self._updateObject(tobj, datamap) else: log.warn("plugin returned unknown map skipping") if not changed: transaction.abort() else: device.setLastChange() trans = transaction.get() trans.setUser("datacoll") trans.note("data applied from automated collection") return changed
def from_nodes(self, nodes):
    """Create a device per node and register each in the Redis graph.

    *nodes* maps each node to its connected nodes.  Devices are created
    with indexing paused so each is indexed at most once; progress is
    logged via ProgressLogger.  Returns a dict mapping node -> device.
    """
    self.log.info("creating %s devices", len(nodes))
    progress = progresslog.ProgressLogger(
        self.log,
        prefix="creating devices",
        total=len(nodes),
        interval=1)
    devices = {}
    with pausedAndOptimizedIndexing():
        for node, connected_nodes in nodes.items():
            devices[node] = self.create_device(node, connected_nodes)
            progress.increment()
    # Update all nodes in Redis graph.
    # FIX (idiom): the original used a list comprehension purely for its
    # side effects, building a throwaway list of None; a plain loop
    # expresses the intent.
    for device in devices.values():
        connections.add_node(device, force=True)
    self.log.info("finished creating %s devices", len(nodes))
    return devices
def _applyDataMap(self, device, datamap):
    """Apply a datamap to a device.

    Resolves the target object (component by parentId, by compname, or
    the device itself), applies the relationship/object map with
    indexing paused, and annotates or aborts the transaction
    accordingly.  Returns True if any object was changed, False
    otherwise.
    """
    self.num_obj_changed = 0
    log.debug("Started _applyDataMap for device %s", device.getId())
    logname = ""
    # This can cause breakage in unit testing when the device is persisted.
    if not hasattr(device.dmd, 'zport'):
        transaction.abort()
    # There's the potential for a device to change device class during
    # modeling. Due to this method being run within a retrying @transact,
    # this will result in device losing its deviceClass relationship.
    if not device.deviceClass():
        new_device = device.dmd.Devices.findDeviceByIdExact(device.id)
        if new_device:
            log.debug("%s changed device class to %s during modeling",
                      new_device.titleOrId(),
                      new_device.getDeviceClassName())
            device = new_device
        else:
            log.error("%s lost its device class during modeling",
                      device.titleOrId())
            return False
    changed = False
    if hasattr(datamap, "parentId") or hasattr(datamap, "compname"):
        if getattr(datamap, "parentId", None):
            if device.id == datamap.parentId:
                tobj = device
            else:
                tobj = device.componentSearch(id=datamap.parentId)
                if len(tobj) == 1:
                    tobj = tobj[0].getObject()
                elif len(tobj) < 1:
                    log.warn("Unable to find a matching parentId '%s'",
                             datamap.parentId)
                    return False
                else:
                    # BUG FIX: this message was an unterminated string
                    # literal split across a physical line (a syntax
                    # error); rebuilt with implicit concatenation and
                    # fixed grammar ("objects").
                    log.warn("Too many objects matching parentId '%s'. "
                             "Make sure all components have a unique id.",
                             datamap.parentId)
                    return False
        elif datamap.compname:
            try:
                tobj = device.getObjByPath(datamap.compname)
            except NotFound:
                log.warn("Unable to find compname '%s'", datamap.compname)
                return False
        else:
            # Neither parentId nor a non-empty compname: target the device.
            tobj = device
    # NOTE(review): tobj is unbound when the datamap has neither a
    # 'parentId' nor a 'compname' attribute — presumably every datamap
    # carries at least one; confirm against plugin contracts.
    # Delay indexing until the map has been fully processed
    # so we index the minimum amount
    with pausedAndOptimizedIndexing():
        if hasattr(datamap, "relname"):
            logname = datamap.relname
            changed = self._updateRelationship(tobj, datamap)
        elif hasattr(datamap, 'modname'):
            logname = datamap.compname
            changed = self._updateObject(tobj, datamap)
        else:
            log.warn("plugin returned unknown map skipping")
    if not changed:
        # Nothing modified: discard any incidental transaction state.
        transaction.abort()
    else:
        device.setLastChange()
        trans = transaction.get()
        trans.setUser("datacoll")
        trans.note("data applied from automated collection")
    log.debug("_applyDataMap for Device %s will modify %d objects for %s",
              device.getId(), self.num_obj_changed, logname)
    return changed
def test_pausedIndexing(self):
    """Exercise pausedAndOptimizedIndexing's event buffering and merging.

    While the manager is active, IndexingEvents are buffered and
    coalesced per object; UnindexingEvents are delivered immediately and
    cancel any buffered IndexingEvent for the same object.  On exit, one
    merged IndexingEvent per remaining object is delivered.
    """
    seen_indexed = []    # (ob, event) pairs seen by the indexing handler
    seen_unindexed = []  # (ob, event) pairs seen by the unindexing handler

    def clear():
        # Empty both lists in place (the handlers close over them).
        while seen_indexed:
            seen_indexed.pop()
        while seen_unindexed:
            seen_unindexed.pop()

    def testOnIndexingEvent(ob, event):
        seen_indexed.append((ob, event))

    def testOnUnindexingEvent(ob, event):
        seen_unindexed.append((ob, event))

    provideHandler(testOnIndexingEvent, (IndexedObject, IndexingEvent))
    provideHandler(testOnUnindexingEvent, (IndexedObject, UnindexingEvent))

    ob1 = IndexedObject('1')

    # Verify events are wired correctly
    notify(IndexingEvent(ob1))
    notify(UnindexingEvent(ob1))
    self.assertEquals(1, len(seen_indexed))
    self.assertEquals(1, len(seen_unindexed))
    clear()

    # Basic functionality: single indexing event, all indexes
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        # Buffered: nothing delivered while paused.
        self.assertEquals(0, len(seen_indexed))
    # Flushed on exit.
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertEquals(ob1, ob)
    # Empty idxs means "all indexes".
    self.assertTrue(not event.idxs)
    clear()

    # Removal, then index
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(UnindexingEvent(ob1))
        # Make sure the removal event has proceeded uninterrupted
        self.assertEquals(1, len(seen_unindexed))
        notify(IndexingEvent(ob1))
    self.assertEquals(1, len(seen_indexed))
    self.assertEquals(1, len(seen_unindexed))
    clear()

    # Index, then removal
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(UnindexingEvent(ob1))
    # The removal cancels the buffered indexing event.
    self.assertEquals(0, len(seen_indexed))
    self.assertEquals(1, len(seen_unindexed))
    clear()

    # 2 indexing events with all indexes
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(IndexingEvent(ob1))
    # Coalesced into a single delivery.
    self.assertEquals(1, len(seen_indexed))
    clear()

    # 2 indexing events, specific first
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, idxs=('path',)))
        notify(IndexingEvent(ob1))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    # "All indexes" absorbs the specific subset.
    self.assertTrue(not event.idxs)
    clear()

    # 2 indexing events, specific second
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(IndexingEvent(ob1, idxs=('path',)))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertTrue(not event.idxs)
    clear()

    # 2 indexing events, both specific
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, idxs=('path',)))
        notify(IndexingEvent(ob1, idxs=('uuid',)))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    # Specific index subsets are unioned.
    self.assertIn('path', event.idxs)
    self.assertIn('uuid', event.idxs)
    self.assertEquals(2, len(event.idxs))
    clear()

    # 2 indexing events, update_metadata on first
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, update_metadata=True))
        notify(IndexingEvent(ob1, update_metadata=False))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    # update_metadata is OR-ed across merged events.
    self.assertTrue(event.update_metadata)
    clear()

    # 2 indexing events, update_metadata on second
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, update_metadata=False))
        notify(IndexingEvent(ob1, update_metadata=True))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertTrue(event.update_metadata)
    clear()

    # 2 indexing events, update_metadata on neither
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, update_metadata=False))
        notify(IndexingEvent(ob1, update_metadata=False))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertFalse(event.update_metadata)
    clear()

    # 2 objects
    ob2 = IndexedObject('2')
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(IndexingEvent(ob2))
    # One merged event per distinct object.
    self.assertEquals(2, len(seen_indexed))
    ob_a, event = seen_indexed[0]
    ob_b, event = seen_indexed[1]
    self.assertTrue(ob_a != ob_b)
    self.assertIn(ob_a, (ob1, ob2))
    self.assertIn(ob_b, (ob1, ob2))
    clear()
def test_pausedIndexing(self):
    """Exercise pausedAndOptimizedIndexing's event buffering and merging.

    While the manager is active, IndexingEvents are buffered and
    coalesced per object; UnindexingEvents are delivered immediately and
    cancel any buffered IndexingEvent for the same object.  On exit, one
    merged IndexingEvent per remaining object is delivered.
    """
    seen_indexed = []    # (ob, event) pairs seen by the indexing handler
    seen_unindexed = []  # (ob, event) pairs seen by the unindexing handler

    def clear():
        # Empty both lists in place (the handlers close over them).
        while seen_indexed:
            seen_indexed.pop()
        while seen_unindexed:
            seen_unindexed.pop()

    def testOnIndexingEvent(ob, event):
        seen_indexed.append((ob, event))

    def testOnUnindexingEvent(ob, event):
        seen_unindexed.append((ob, event))

    provideHandler(testOnIndexingEvent, (IndexedObject, IndexingEvent))
    provideHandler(testOnUnindexingEvent, (IndexedObject, UnindexingEvent))

    ob1 = IndexedObject('1')

    # Verify events are wired correctly
    notify(IndexingEvent(ob1))
    notify(UnindexingEvent(ob1))
    self.assertEquals(1, len(seen_indexed))
    self.assertEquals(1, len(seen_unindexed))
    clear()

    # Basic functionality: single indexing event, all indexes
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        # Buffered: nothing delivered while paused.
        self.assertEquals(0, len(seen_indexed))
    # Flushed on exit.
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertEquals(ob1, ob)
    # Empty idxs means "all indexes".
    self.assertTrue(not event.idxs)
    clear()

    # Removal, then index
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(UnindexingEvent(ob1))
        # Make sure the removal event has proceeded uninterrupted
        self.assertEquals(1, len(seen_unindexed))
        notify(IndexingEvent(ob1))
    self.assertEquals(1, len(seen_indexed))
    self.assertEquals(1, len(seen_unindexed))
    clear()

    # Index, then removal
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(UnindexingEvent(ob1))
    # The removal cancels the buffered indexing event.
    self.assertEquals(0, len(seen_indexed))
    self.assertEquals(1, len(seen_unindexed))
    clear()

    # 2 indexing events with all indexes
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(IndexingEvent(ob1))
    # Coalesced into a single delivery.
    self.assertEquals(1, len(seen_indexed))
    clear()

    # 2 indexing events, specific first
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, idxs=('path', )))
        notify(IndexingEvent(ob1))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    # "All indexes" absorbs the specific subset.
    self.assertTrue(not event.idxs)
    clear()

    # 2 indexing events, specific second
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(IndexingEvent(ob1, idxs=('path', )))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertTrue(not event.idxs)
    clear()

    # 2 indexing events, both specific
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, idxs=('path', )))
        notify(IndexingEvent(ob1, idxs=('uuid', )))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    # Specific index subsets are unioned.
    self.assertIn('path', event.idxs)
    self.assertIn('uuid', event.idxs)
    self.assertEquals(2, len(event.idxs))
    clear()

    # 2 indexing events, update_metadata on first
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, update_metadata=True))
        notify(IndexingEvent(ob1, update_metadata=False))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    # update_metadata is OR-ed across merged events.
    self.assertTrue(event.update_metadata)
    clear()

    # 2 indexing events, update_metadata on second
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, update_metadata=False))
        notify(IndexingEvent(ob1, update_metadata=True))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertTrue(event.update_metadata)
    clear()

    # 2 indexing events, update_metadata on neither
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1, update_metadata=False))
        notify(IndexingEvent(ob1, update_metadata=False))
    self.assertEquals(1, len(seen_indexed))
    ob, event = seen_indexed[0]
    self.assertFalse(event.update_metadata)
    clear()

    # 2 objects
    ob2 = IndexedObject('2')
    with pausedAndOptimizedIndexing(testOnIndexingEvent,
                                    testOnUnindexingEvent):
        notify(IndexingEvent(ob1))
        notify(IndexingEvent(ob2))
    # One merged event per distinct object.
    self.assertEquals(2, len(seen_indexed))
    ob_a, event = seen_indexed[0]
    ob_b, event = seen_indexed[1]
    self.assertTrue(ob_a != ob_b)
    self.assertIn(ob_a, (ob1, ob2))
    self.assertIn(ob_b, (ob1, ob2))
    clear()