def _flatten_obj_instance(self, obj):
    """Recursively flatten an instance and return a json-friendly dict """
    data = {}
    # Capability probes drive which flattening strategy is used below.
    has_class = hasattr(obj, '__class__')
    has_dict = hasattr(obj, '__dict__')
    # __slots__ only matters when there is no __dict__ to flatten.
    has_slots = not has_dict and hasattr(obj, '__slots__')
    # Support objects with __getstate__(); this ensures that
    # both __setstate__() and __getstate__() are implemented
    has_getstate = hasattr(obj, '__getstate__')
    has_getstate_support = has_getstate and hasattr(obj, '__setstate__')
    if has_class and not util.is_module(obj):
        module, name = _getclassdetail(obj)
        if self.unpicklable:
            #ksteinfe
            # Local patch: classes from decodes.core are tagged by bare
            # class name; everything else keeps the "module.name" form.
            if module[:12] == "decodes.core":
                data[tags.OBJECT] = name
            else:
                data[tags.OBJECT] = '%s.%s' % (module, name)
        # Check for a custom handler
        handler = handlers.get(type(obj))
        if handler is not None:
            return handler(self).flatten(obj, data)
    if util.is_module(obj):
        if self.unpicklable:
            data[tags.REPR] = '%s/%s' % (obj.__name__, obj.__name__)
        else:
            # NOTE: Py2-style `unicode`; replaces the dict with a string
            # representation of the module when unpicklable is off.
            data = unicode(obj)
        return data
    if util.is_dictionary_subclass(obj):
        # Flatten dict contents first, then fold in __getstate__ output
        # (both end up in `data`).
        self._flatten_dict_obj(obj, data)
        if has_getstate_support:
            self._getstate(obj, data)
        return data
    if has_dict:
        # Support objects that subclasses list and set
        if util.is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)
        if has_getstate_support:
            return self._getstate(obj, data)
        # hack for zope persistent objects; this unghostifies the object
        getattr(obj, '_', None)
        return self._flatten_dict_obj(obj.__dict__, data)
    if util.is_sequence_subclass(obj):
        return self._flatten_sequence_obj(obj, data)
    if util.is_noncomplex(obj):
        return [self._flatten(v) for v in obj]
    if has_slots:
        return self._flatten_newstyle_with_slots(obj, data)
    # Falls through (returns None) when no strategy matches.
def _restore_object(self, obj):
    """Restore a tagged object dict to a live instance.

    Resolves the class named under tags.OBJECT; if it cannot be loaded,
    the raw dict itself is registered as the reference. A registered
    custom handler, when present, takes precedence over the generic
    instance-restoration path.
    """
    cls = loadclass(obj[tags.OBJECT])
    if cls is None:
        return self._mkref(obj)
    custom = handlers.get(cls)
    if custom is None:
        return self._restore_object_instance(obj, cls)
    # A custom handler owns the restoration for this class.
    restored = custom(self).restore(obj)
    return self._mkref(restored)
def compare_runs(old_run, new_run, checkers):
    """Run each configured checker against a pair of runs and raise alerts.

    For every checker, the handler registered under checker['type'] computes
    a similarity score. An alert fires when the score exceeds the checker's
    threshold, or when the score is None and 'alert_if_none' is enabled
    (default True). Returns the list of Alert objects produced.
    """
    logging.info("Comparing runs %s and %s", old_run, new_run)
    all_results = []
    alerts = []
    for checker in checkers:
        check = handlers.get(checker['type'])
        similarity = check(checker, old_run, new_run)
        # A missing score is alert-worthy unless explicitly opted out.
        if similarity is None:
            triggered = checker.get('alert_if_none', True)
        else:
            triggered = similarity > checker['threshold']
        if triggered:
            alerts.append(Alert(old_run, checker['name'], similarity))
        all_results.append(similarity)
    logging.info("Similarity between %d and %d is (%s)",
                 old_run.id, new_run.id, format_floats(all_results))
    if alerts:
        log_alerts(old_run, new_run, alerts)
    return alerts
def _handle_client(cache_lock, cache, client):
    """Serve one client connection until it disconnects.

    Reads CRLF-terminated command lines from the socket, dispatches each
    to the handler registered for its (decoded) command word, and replies
    INVALID_COMMAND for unknown commands. Each handler call runs while
    holding cache_lock, serializing cache mutations across clients.
    """
    rd = client.makefile('rb', newline='\r\n')
    while True:
        line = rd.readline()
        if not line:
            # disconnected
            return
        # cut off \r\n
        line = line[:-2]
        if not line:
            continue
        command, *rest = line.split(maxsplit=1)
        command = command.decode(DEFAULT_ENCODING)
        handler = handlers.get(command)
        if handler is None:
            client.send(b'INVALID_COMMAND\r\n')
            continue
        # BUG FIX: acquire() used to sit inside the try block, so a failed
        # or interrupted acquire still ran release() in the finally clause,
        # raising "release unlocked lock" and masking the original error.
        # `with` acquires first and only then guarantees the release.
        with cache_lock:
            handler(cache, rd, client, *rest)
def resource_action(resource, action):
    """Execute *action* on *resource* through its configured handler.

    The handler name comes from resource.metadata (falling back to 'none');
    the matching handler class is instantiated as a context manager over a
    single-resource list plus the default transports.
    """
    handler_name = resource.metadata.get('handler', 'none')
    handler_cls = handlers.get(handler_name)
    with handler_cls([resource], _default_transports) as h:
        return h.action(resource, action)
def resource_action(resource, action):
    """Execute *action* on *resource* through its configured handler.

    The handler name is taken from the resource's db_obj record, defaulting
    to 'none' when unset; the handler class runs as a context manager over a
    single-resource list plus the default transports.
    """
    handler_name = resource.db_obj.handler or 'none'
    handler_cls = handlers.get(handler_name)
    with handler_cls([resource], _default_transports) as h:
        return h.action(resource, action)
def resource_action(resource, action):
    """Execute *action* on *resource* through its configured handler.

    The handler name comes from resource.metadata (falling back to 'none');
    the matching handler class is instantiated as a context manager over a
    single-resource list (no explicit transports in this variant).
    """
    handler_name = resource.metadata.get('handler', 'none')
    handler_cls = handlers.get(handler_name)
    with handler_cls([resource]) as h:
        return h.action(resource, action)