def _reload_netboxes(self):
    """Reload the pollable netbox set and refresh the poll schedules.

    Returns the Deferred from the asynchronous load, with success and
    failure handlers already attached.
    """
    loading = self.netboxes.load_all()
    loading.addCallbacks(
        self._process_reloaded_netboxes, self._handle_reload_failures
    )
    # Drop Django's accumulated query log so it doesn't grow unbounded in
    # this long-running process.
    db.django_debug_cleanup()
    return loading
def test_django_debug_cleanup_should_run_without_errors():
    from django.db import connection

    # Issue at least one query so Django's debug query log is non-empty
    # before we try to clean it up.
    list(Netbox.objects.all())
    assert len(connection.queries) > 0, "no query was logged by Django"
    assert django_debug_cleanup() is None
def load_all_s(self):
    """Synchronously load netboxes from database.

    Returns:

      A three-tuple, (new_ids, lost_ids, changed_ids), whose elements are
      sets of netbox IDs.

      - The first set are IDs that are new since the last load operation.
      - The second is the set of IDs that have been removed since the last
        load operation.
      - The third is the set of IDs of netboxes whose information have
        changed in the database since the last load operation.

    """
    related = ('room__location', 'type__vendor', 'category', 'organization',
               'device')
    # Correlated subquery: True when the netbox has no unresolved
    # snmpAgentState alert, i.e. its SNMP agent is considered up.
    snmp_up_query = """SELECT COUNT(*) = 0
                       FROM alerthist
                       WHERE alerthist.netboxid = netbox.netboxid
                         AND eventtypeid='snmpAgentState'
                         AND end_time >= 'infinity'
                    """
    queryset = manage.Netbox.objects.select_related(*related).extra(
        select={'snmp_up': snmp_up_query})
    netbox_list = storage.shadowify_queryset(queryset)
    netbox_dict = {netbox.id: netbox for netbox in netbox_list}

    # Attach each netbox' per-job last-run timestamps before diffing.
    times = load_last_updated_times()
    for netbox in netbox_list:
        netbox.last_updated = times.get(netbox.id, {})

    django_debug_cleanup()

    previous_ids = set(self.keys())
    current_ids = set(netbox_dict.keys())
    lost_ids = previous_ids.difference(current_ids)
    new_ids = current_ids.difference(previous_ids)
    same_ids = previous_ids.intersection(current_ids)
    changed_ids = {i for i in same_ids
                   if is_netbox_changed(self[i], netbox_dict[i])}

    # update self
    for i in lost_ids:
        del self[i]
    for i in new_ids:
        self[i] = netbox_dict[i]
    for i in same_ids:
        self[i].copy(netbox_dict[i])

    self.peak_count = max(self.peak_count, len(self))

    # Only log at INFO level when the set actually changed, to keep the
    # steady-state log quiet.
    anything_changed = bool(new_ids or lost_ids or changed_ids)
    log = self._logger.info if anything_changed else self._logger.debug
    log("Loaded %d netboxes from database "
        "(%d new, %d removed, %d changed, %d peak)",
        len(netbox_dict), len(new_ids), len(lost_ids), len(changed_ids),
        self.peak_count)

    return (new_ids, lost_ids, changed_ids)
def load_all_s(self):
    """Synchronously load netboxes from database.

    Returns:

      A three-tuple, (new_ids, lost_ids, changed_ids), whose elements are
      sets of netbox IDs.

      - The first set are IDs that are new since the last load operation.
      - The second is the set of IDs that have been removed since the last
        load operation.
      - The third is the set of IDs of netboxes whose information have
        changed in the database since the last load operation.

    """
    related = ('room__location', 'type__vendor', 'category', 'organization')
    # Precompute the set of netboxes with unresolved snmpAgentState alerts;
    # a netbox' SNMP agent is considered up iff it is not in this set.
    snmp_down = set(
        event.AlertHistory.objects.unresolved(
            'snmpAgentState').values_list('netbox__id', flat=True))
    self._logger.debug("These netboxes have active snmpAgentStates: %r",
                       snmp_down)

    queryset = manage.Netbox.objects.filter(deleted_at__isnull=True)
    queryset = list(queryset.select_related(*related))
    for netbox in queryset:
        netbox.snmp_up = netbox.id not in snmp_down

    netbox_list = storage.shadowify_queryset(queryset)
    netbox_dict = {netbox.id: netbox for netbox in netbox_list}

    # Attach each netbox' per-job last-run timestamps before diffing.
    times = load_last_updated_times()
    for netbox in netbox_list:
        netbox.last_updated = times.get(netbox.id, {})

    django_debug_cleanup()

    previous_ids = set(self.keys())
    current_ids = set(netbox_dict.keys())
    lost_ids = previous_ids.difference(current_ids)
    new_ids = current_ids.difference(previous_ids)
    same_ids = previous_ids.intersection(current_ids)
    changed_ids = {i for i in same_ids
                   if is_netbox_changed(self[i], netbox_dict[i])}

    # update self
    for i in lost_ids:
        del self[i]
    for i in new_ids:
        self[i] = netbox_dict[i]
    for i in same_ids:
        self[i].copy(netbox_dict[i])

    self.peak_count = max(self.peak_count, len(self))

    # Only log at INFO level when the set actually changed, to keep the
    # steady-state log quiet.
    anything_changed = bool(new_ids or lost_ids or changed_ids)
    log = self._logger.info if anything_changed else self._logger.debug
    log("Loaded %d netboxes from database "
        "(%d new, %d removed, %d changed, %d peak)",
        len(netbox_dict), len(new_ids), len(lost_ids), len(changed_ids),
        self.peak_count)

    return (new_ids, lost_ids, changed_ids)
def load_all_s(self):
    """Synchronously load netboxes from database.

    Returns:

      A three-tuple, (new_ids, lost_ids, changed_ids), whose elements are
      sets of netbox IDs.

      - The first set are IDs that are new since the last load operation.
      - The second is the set of IDs that have been removed since the last
        load operation.
      - The third is the set of IDs of netboxes whose information have
        changed in the database since the last load operation.

    """
    related = ('room__location', 'type__vendor', 'category', 'organization',
               'device')
    # Correlated subquery: True when the netbox has no unresolved
    # snmpAgentState alert, i.e. its SNMP agent is considered up.
    snmp_up_query = """SELECT COUNT(*) = 0
                       FROM alerthist
                       WHERE alerthist.netboxid = netbox.netboxid
                         AND eventtypeid='snmpAgentState'
                         AND end_time >= 'infinity'
                    """
    queryset = manage.Netbox.objects.select_related(*related).extra(
        select={'snmp_up': snmp_up_query})
    netbox_list = storage.shadowify_queryset(queryset)
    netbox_dict = {netbox.id: netbox for netbox in netbox_list}

    # Attach each netbox' per-job last-run timestamps before diffing.
    times = load_last_updated_times()
    for netbox in netbox_list:
        netbox.last_updated = times.get(netbox.id, {})

    django_debug_cleanup()

    previous_ids = set(self.keys())
    current_ids = set(netbox_dict.keys())
    lost_ids = previous_ids.difference(current_ids)
    new_ids = current_ids.difference(previous_ids)
    same_ids = previous_ids.intersection(current_ids)
    changed_ids = {i for i in same_ids
                   if is_netbox_changed(self[i], netbox_dict[i])}

    # update self
    for i in lost_ids:
        del self[i]
    for i in new_ids:
        self[i] = netbox_dict[i]
    for i in same_ids:
        self[i].copy(netbox_dict[i])

    self.peak_count = max(self.peak_count, len(self))

    # Only log at INFO level when the set actually changed, to keep the
    # steady-state log quiet.
    anything_changed = bool(new_ids or lost_ids or changed_ids)
    log = self._logger.info if anything_changed else self._logger.debug
    log("Loaded %d netboxes from database "
        "(%d new, %d removed, %d changed, %d peak)",
        len(netbox_dict), len(new_ids), len(lost_ids), len(changed_ids),
        self.peak_count)

    return (new_ids, lost_ids, changed_ids)