def __record(self, event_type, tpname, groupname, oid, dim_change, full_dim_map, is_projection=False):
    """Fold one change event into self.current_record.

    current_record layout: groupname -> oid ->
        {"types": {tpname: Event}, "dims": {dim: serialized record}}.
    full_dim_map is accepted for signature compatibility with the record
    tuples but is not read here.
    """
    if not self.startrecording:
        return
    if event_type == Event.Delete and tpname == groupname:
        # it is its own key. Which means the obj is being deleted for good.
        # Purge all changes.
        if groupname in self.current_record and oid in self.current_record[groupname]:
            if "dims" in self.current_record[groupname][oid]:
                del self.current_record[groupname][oid]["dims"]
            for tp in self.current_record[groupname][oid]["types"]:
                self.current_record[groupname][oid]["types"][tp] = Event.Delete
        self.deleted_objs.setdefault(groupname, set()).add(oid)
    if event_type != Event.Delete and tpname in self.deleted_objs and oid in self.deleted_objs[tpname]:
        # This object is flagged for deletion. Throw this change away.
        return
    # New projections are recorded under the group name; everything else
    # under the concrete type name.
    self.current_record.setdefault(
        groupname, RecursiveDictionary()).setdefault(
            oid, RecursiveDictionary({"types": RecursiveDictionary()}))["types"].rec_update(
                RecursiveDictionary({
                    (groupname if event_type == Event.New and is_projection
                     else tpname): event_type}))
    if dim_change:
        # Merge dimension deltas into the buffered record.
        # (Removed unused local `fks = []` from the original.)
        dims = self.current_record[groupname][oid].setdefault(
            "dims", RecursiveDictionary())
        dims.rec_update(dim_change)
def adjust_pcc(self, tp_obj, objs_and_changes, to_be_converted=False):
    """Recompute PCC (predicate class collection) memberships after changes
    to objects of group *tp_obj*.

    objs_and_changes: oid -> (object, dim-change map).
    Returns a list of ChangeRecords: New/Modification for objects now in a
    pure group member type, Delete for objects that fell out of one.
    NOTE(review): to_be_converted is accepted but never read here.
    """
    if not self.calculate_pcc:
        return list()
    can_be_created_objs = tp_obj.pure_group_members
    # Snapshot which of the changed oids were previously members of each
    # derivable type, so we can detect drop-outs afterwards.
    old_memberships = dict()
    for othertp_obj in can_be_created_objs:
        othertp = othertp_obj.type
        othertpname = othertp_obj.name
        old_set = old_memberships.setdefault(othertp_obj, set())
        if (othertpname in self.object_map):
            old_set.update(
                set([
                    oid for oid in self.object_map[othertpname]
                    if oid in objs_and_changes
                ]))
    objs = RecursiveDictionary()
    changes = RecursiveDictionary()
    fks = list()  # Ignore these fks. They should arrive with the rest of the object
    for oid in objs_and_changes:
        objs[oid], changes[oid] = objs_and_changes[oid]
    # Re-evaluate which PCC types each changed object now belongs to.
    obj_map = ObjectManager.build_pccs(can_be_created_objs,
                                       {tp_obj.group_key: objs}, None)
    records = list()
    for othertp in obj_map:
        othertpname = othertp.__realname__
        for oid in obj_map[othertp]:
            event = (Event.Modification
                     if othertpname in self.object_map
                     and oid in self.object_map[othertpname] else Event.New)
            self.object_map.setdefault(
                othertpname, RecursiveDictionary())[oid] = obj_map[othertp][oid]
            obj_map[othertp][
                oid]._dataframe_data = self.type_manager.tp_to_dataframe_payload[
                    self.type_manager.get_requested_type(othertp)]
            # New members report their full dim map; existing ones only
            # the incoming change.
            obj_changes = ObjectManager.__convert_to_dim_map(
                obj_map[othertp]
                [oid]) if event == Event.New else changes[oid]
            records.extend(
                self.__create_records(
                    event, self.type_manager.get_requested_type(othertp), oid,
                    obj_changes,
                    ObjectManager.__convert_to_dim_map(
                        obj_map[othertp][oid])))
    # Objects that were members before but are absent from the rebuilt map
    # have fallen out of the PCC type: emit Delete and untrack them.
    # NOTE(review): obj_map[othertp_obj.type] raises KeyError if build_pccs
    # omitted that type entirely — confirm build_pccs always returns every
    # candidate type.
    for othertp_obj in old_memberships:
        for oid in old_memberships[othertp_obj].difference(
                set(obj_map[othertp_obj.type])):
            if othertp_obj.name in self.object_map and oid in self.object_map[
                    othertp_obj.name]:
                records.append(
                    ChangeRecord(Event.Delete, othertp_obj, oid, None, None))
                del self.object_map[othertp_obj.name][oid]
    return records
def merge_impure_record(self, current_record, results):
    """Merge freshly computed impure-type results into *current_record*.

    results: type -> list of current member objects (from the dataframe).
    Objects known before but missing from *results* are reported deleted.
    Existing Delete/Modification entries in current_record win over the
    recomputed state; otherwise dims are merged in.
    Returns the mutated current_record.
    """
    deleted = RecursiveDictionary()
    for tp in self.registered_impures:
        tpname = tp.__realname__
        obj_oids = self.known_objects[tpname] if tpname in self.known_objects else set()
        next_oids = set([obj.__primarykey__ for obj in results[tp]]) if tp in results else set()
        # Previously known oids that no longer appear are deletions.
        deleted_oids = obj_oids.difference(next_oids)
        deleted[tpname] = deleted_oids
    impure_results = self.dataframe.convert_to_record(results, deleted)
    for group_name, grpchanges in impure_results.items():
        if group_name not in current_record:
            current_record[group_name] = grpchanges
            continue
        for oid, obj_changes in grpchanges.items():
            if oid not in current_record[group_name]:
                current_record[group_name][oid] = obj_changes
                continue
            for tpname, event in obj_changes["types"].items():
                # Prefer an event already buffered for this type, if any.
                if tpname in current_record[group_name][oid]["types"]:
                    existing_event = current_record[group_name][oid]["types"][tpname]
                else:
                    existing_event = event
                # Buffered Delete/Modification take precedence; skip merge.
                if existing_event == Event.Delete or existing_event == Event.Modification:
                    continue
                current_record[group_name][oid].setdefault(
                    "dims", RecursiveDictionary()).rec_update(obj_changes["dims"])
                current_record[group_name][oid]["types"][tpname] = existing_event
    return current_record
def convert_whole_object_map(self):
    """Serialize every tracked object into change records.

    Builds a type -> object-collection map from object_map and hands it
    to convert_to_records with an empty deleted-map.
    """
    everything = RecursiveDictionary()
    for tpname, objmap in self.object_map.items():
        requested = self.type_manager.get_requested_type_from_str(tpname)
        everything[requested.type] = objmap.values()
    return self.convert_to_records(everything, RecursiveDictionary())
def __build_dimension_obj(self, dim_received, group_obj, full_record):
    """Materialize an instance of group_obj.super_class from received dims.

    dim_received: dim name -> serialized record.
    Returns (obj, dim_map) where dim_map keeps every received record,
    including dims the super class does not declare.
    (Removed unused local `groupname` from the original.)
    """
    dim_map = RecursiveDictionary()
    super_class = group_obj.super_class
    obj = super_class()
    for dim in dim_received:
        record = dim_received[dim]
        dim_map[dim] = record
        # Undeclared dims are kept in dim_map but not set on the object.
        if not hasattr(super_class, dim):
            continue
        if record["type"] == Record.OBJECT:
            # Rebuild the embedded object from its attribute map, then
            # stamp it with the declared dimension type.
            new_record = RecursiveDictionary()
            new_record["type"] = Record.DICTIONARY
            new_record["value"] = record["value"]["omap"]
            dict_value = self.__process_record(new_record, full_record)
            value = self.__create_fake_class()()
            value.__dict__ = dict_value
            value.__class__ = getattr(super_class, dim)._type
        elif (record["type"] == Record.COLLECTION
              or record["type"] == Record.DICTIONARY):
            # Convert to the declared container type.
            collect = self.__process_record(record, full_record)
            value = getattr(super_class, dim)._type(collect)
        else:
            value = self.__process_record(record, full_record)
        setattr(obj, dim, value)
    return obj, dim_map
def __append(self, tp_obj, obj):
    """Insert *obj* into current_state/object_map and emit New records.

    Generates a primary key when the object lacks one. The object's
    __dict__ is re-pointed at the shared current_state entry so future
    mutations are visible to the dataframe.
    (Removed dead assignment `tpname = tp_obj.name`, which was always
    overwritten by `tp.__realname__` before any read.)
    """
    records = list()
    tp = tp_obj.type
    groupname = tp_obj.group_key
    # all clear to insert.
    try:
        oid = obj.__primarykey__
    except AttributeError:
        # No primary key yet: mint one.
        setattr(obj, tp.__primarykey__._name, str(uuid4()))
        oid = obj.__primarykey__
    tpname = tp.__realname__
    # Store the state in records
    self.current_state.setdefault(
        groupname, RecursiveDictionary())[oid] = RecursiveDictionary(obj.__dict__)
    # Set the object state by reference to the original object's symbol table
    obj.__dict__ = self.current_state[groupname][oid]
    obj._dataframe_data = self.type_manager.tp_to_dataframe_payload[tp_obj]
    self.object_map.setdefault(tpname, RecursiveDictionary())[oid] = obj
    self.object_map[tpname][oid].__start_tracking__ = True
    obj_changes = ObjectManager.__convert_to_dim_map(obj)
    records.extend(
        self.__create_records(Event.New, tp_obj, oid, obj_changes, obj_changes))
    return records
def __init__(self, name, types, dataframe):
    """Set up per-app bookkeeping and register this queue with *dataframe*.

    connect_app_queue is called after the record/known-object maps exist,
    since it hands a reference to this object back to the dataframe.
    """
    self.app_name = name
    self.known_objects = RecursiveDictionary()
    self.current_record = RecursiveDictionary()
    self.types = types
    self.dataframe = dataframe
    self.registered_impures, self.queue = dataframe.connect_app_queue(self)
    self.lock = RLock()
    # The first fetch pulls all types, not just the registered impures.
    self.first_run = True
def reset_tracking_cache_for_type(self, app, tpname):
    """Reset the "new" and "deleted" buckets for (*app*, *tpname*),
    preserving the accumulated "mod" entries.

    Does nothing when *app* fails the registration check.
    """
    if not self.app_check(app):
        return
    preserved_mods = self.__app_data[app][tpname]["mod"]
    self.__app_data[app][tpname] = RecursiveDictionary({
        "new": RecursiveDictionary(),
        "mod": preserved_mods,
        "deleted": set()
    })
def _frame__pull(self):
    """Benchmark-mode pull: fetch full object dictionaries from each host.

    Falls back to the parent's diff-based pull when the mode calls for it;
    otherwise clears local copies of the pulled types and rebuilds them
    from the "/dictupdate" endpoint, counting bytes received.
    """
    if not self.mode or self.mode in DIFF_PULL:
        super(BenchmarkFrame, self)._frame__pull()
    else:
        self._instruments['bytes received'] = 0
        if self._frame__disconnected:
            return
        self.object_store.clear_buffer()
        try:
            for host in self._frame__host_typemap:
                typemap = self._frame__host_typemap[host]
                pull_types = typemap["getting"].union(
                    typemap["gettingsetting"].union(typemap["tracking"]))
                # Full-state pull: wipe local copies before repopulating.
                for t in pull_types:
                    if t.__realname__ in self.object_store.object_map:
                        self.object_store.object_map[
                            t.__realname__].clear()
                resp = self._frame__sessions[host].get(
                    "%s/dictupdate" % host,
                    data={
                        "observed_types":
                        json.dumps([t.__realname__ for t in pull_types])
                    })
                objs = resp.json()
                for t in objs:
                    if objs[t]:
                        typeObj = self._frame__name2type[t]
                        real_objs = [
                            create_complex_obj(
                                typeObj, obj, self.object_store.object_map)
                            for obj in objs[t]
                        ]
                        if t == self.object_store.member_to_group[t]:
                            # if it is a group key. Can use the default API to put the objs.
                            self.object_store.extend(typeObj, real_objs)
                        else:
                            # Non-group type: splice each object's state
                            # into the shared per-group current_state map.
                            gkey = self.object_store.member_to_group[t]
                            current_state_map = self.object_store.current_state.setdefault(
                                gkey, RecursiveDictionary())
                            object_map = self.object_store.object_map.setdefault(
                                t, RecursiveDictionary())
                            for obj in real_objs:
                                oid = obj.__primarykey__
                                current_state_map.setdefault(
                                    oid, RecursiveDictionary()).rec_update(
                                        obj.__dict__)
                                obj.__dict__ = current_state_map[oid]
                                object_map[oid] = obj
                self._instruments['bytes received'] += len(resp.content)
                pass
        except ConnectionError:
            self.logger.exception("Disconnected from host.")
            self._frame__disconnected = True
            self._stop()
def __init__(self, type_manager, calculate_pcc=True):
    """Initialize the object manager's tracking state.

    type_manager: resolves type names/handles for the dataframe.
    calculate_pcc: when False, PCC recalculation is skipped entirely.
    """
    self.type_manager = type_manager
    self.calculate_pcc = calculate_pcc
    # <group key> -> id -> object state. (Might have to make this even better)
    # object state is {"base": base state, "type 1": extra fields etc., ...}
    self.current_state = {}
    self.object_map = {}
    self.deleted_objs = RecursiveDictionary()
    self.changelog = RecursiveDictionary()
    self.record_obj = RecursiveDictionary()
def __convert_obj_to_change_record(self, obj):
    """Serialize *obj*'s dimensions into a wire-format dim map.

    Returns (fks, dim_map): fks collects foreign-key references discovered
    by __generate_dim; dim_map maps dimension names to serialized records.
    (Removed unused local `oid = obj.__primarykey__` from the original.)
    """
    fks = list()
    dim_change_final = RecursiveDictionary()
    dim_change = self.__convert_to_dim_map(obj)
    for k, v in dim_change.items():
        dim_change_final[k._name] = self.__generate_dim(v, fks, set())
    return fks, dim_change_final
def __build_fk_into_objmap(self, fks, final_record):
    """Recursively fold foreign-key referenced objects into *final_record*.

    fks: list of (fk oid, fk type handle, fk object) triples. Each is
    serialized and recorded as Event.New under its group key; foreign keys
    discovered during serialization are processed by the recursive call.
    (Removed unused locals `group` and `fk_event_type`; the recursion runs
    once per level, after the loop — merging is idempotent either way.)
    """
    if len(fks) == 0:
        return
    more_fks = list()
    for fk, fk_type_obj, fk_obj in fks:
        new_fks, fk_full_obj = self.__convert_obj_to_change_record(fk_obj)
        more_fks.extend(new_fks)
        fk_obj_record = final_record.setdefault(
            fk_type_obj.group_key, RecursiveDictionary()).setdefault(
                fk, RecursiveDictionary())
        fk_obj_record.setdefault(
            "dims", RecursiveDictionary()).rec_update(fk_full_obj)
        fk_obj_record.setdefault(
            "types", RecursiveDictionary())[fk_type_obj.name] = Event.New
    # Process the next level of foreign keys discovered above.
    self.__build_fk_into_objmap(more_fks, final_record)
def add_buffer_changes(self, changes, deletes):
    """Best-effort: index applied changes by event type into self.changelog.

    changes: wire-format dict with an optional "gc" section
        (groupname -> oid -> {"types": {tpname: event}, ...}).
    deletes: tpname -> oid -> deleted object, consulted for Delete events
        since those objects are no longer in object_map.
    Failures are deliberately swallowed: the changelog is a convenience
    index, not required for correctness.
    (Removed the original's no-op inner `except Exception: raise`.)
    """
    try:
        if "gc" not in changes:
            return
        for groupname, group_changes in changes["gc"].items():
            for oid, obj_changes in group_changes.items():
                for tpname, event in obj_changes["types"].items():
                    self.changelog.setdefault(
                        event, RecursiveDictionary()).setdefault(
                            tpname, RecursiveDictionary())[oid] = (
                                self.object_map[tpname][oid]
                                if event != Event.Delete else
                                deletes[tpname][oid])
    except Exception:
        # Best-effort by design: never propagate.
        return
def __pull(self):
    """Pull tracked and updated objects from every host and apply them.

    Fetches "/tracked" for tracking-mode types and "/updated" for
    getting/gettingsetting types, merges all host responses into one
    RecursiveDictionary, and hands it to __process_pull_resp.
    NOTE(review): iterates self.__host_typemap but reads type sets from
    the global self.__typemap — confirm the same types apply to all hosts.
    """
    self.object_store.clear_incoming_record()
    final_resp = RecursiveDictionary()
    for host in self.__host_typemap:
        resp = requests.get(host + "/tracked", data = {
            "get_types": json.dumps(
                {"types": [tp.Class().__name__
                           for tp in list(self.__typemap["tracking"])]})
        })
        final_resp.rec_update(resp.json())
        resp = requests.get(host + "/updated", data = {
            "get_types": json.dumps({
                "types": [tp.Class().__name__
                          for tp in list(self.__typemap["getting"].union(
                              self.__typemap["gettingsetting"]))]
            })
        })
        final_resp.rec_update(resp.json())
    self.__process_pull_resp(final_resp)
def __add_new(self, objs_new, records, touched_objs):
    """Register incoming new objects and collect their New records.

    objs_new: per-type map of oid -> (object, dim-change map).
    NOTE(review): the outer loop iterates objs_new's keys as tp_obj handles
    but indexes objs_new[tp_obj.name] — confirm callers key this dict so
    both lookups succeed.
    Mutates *records* (appends ChangeRecords when propagate_changes is on)
    and *touched_objs* (group_key -> oid -> change) in place.
    """
    for tp_obj in objs_new:
        tp_current_state = self.current_state.setdefault(
            tp_obj.group_key, RecursiveDictionary())
        for oid, obj_and_change in objs_new[tp_obj.name].items():
            obj, change = obj_and_change
            # Merge into the shared state and re-point the object's
            # __dict__ at it so later mutations are tracked.
            tp_current_state.setdefault(
                oid, RecursiveDictionary()).rec_update(obj.__dict__)
            obj.__dict__ = tp_current_state[oid]
            obj._dataframe_data = self.type_manager.tp_to_dataframe_payload[
                tp_obj]
            obj.__start_tracking__ = True
            self.object_map.setdefault(tp_obj.name,
                                       RecursiveDictionary())[oid] = obj
            touched_objs.setdefault(tp_obj.group_key,
                                    RecursiveDictionary())[oid] = change
            if self.propagate_changes:
                records.extend(
                    self.__create_records(Event.New, tp_obj, oid, change,
                                          change, True))
def __adjust_pcc_touched(self, touched_objs):
    """Recalculate PCC membership for every group touched this cycle.

    touched_objs: group_key -> oid -> dim-change map. Only oids still
    present in object_map participate. Returns the combined list of
    ChangeRecords produced by adjust_pcc.
    """
    records = list()
    for gkey, changes in touched_objs.items():
        tracked = self.object_map[gkey] if gkey in self.object_map else {}
        members = RecursiveDictionary()
        for oid, change in changes.items():
            if oid in tracked:
                members[oid] = (tracked[oid], change)
        group_type = self.type_manager.get_requested_type_from_str(gkey)
        records.extend(self.adjust_pcc(group_type, members))
    return records
def merge_records(self, records):
    """Merge raw ChangeRecords into self.current_record under the lock.

    A buffered Delete for (oid, tpname) is final: later events for that
    type are ignored. Modifications to objects this app has never seen
    are upgraded to New and carry the full object instead of the delta.
    """
    with self.lock:
        for rec in records:
            event, tpname, groupname, oid, dim_change, full_obj = (
                rec.event, rec.tpname, rec.groupname, rec.oid,
                rec.dim_change, rec.full_obj)
            obj_changes = self.current_record.setdefault(
                groupname, RecursiveDictionary()).setdefault(
                    oid, RecursiveDictionary())
            type_changes = obj_changes.setdefault("types",
                                                  RecursiveDictionary())
            # Delete already buffered for this type: drop later events.
            if tpname in type_changes and type_changes[tpname] == Event.Delete:
                continue
            is_known = tpname in self.known_objects and oid in self.known_objects[tpname]
            if event == Event.New:
                type_changes[tpname] = event
                obj_changes.setdefault(
                    "dims", RecursiveDictionary()).rec_update(full_obj)
            elif event == Event.Modification:
                # Unknown objects get the full state as a New event.
                type_changes[tpname] = event if is_known else Event.New
                change = dim_change if is_known else full_obj
                if change:
                    obj_changes.setdefault(
                        "dims", RecursiveDictionary()).rec_update(change)
            elif event == Event.Delete:
                type_changes[tpname] = event
def __change_modified(self, objs_mod, records, touched_objs):
    """Apply modification payloads to tracked objects and record them.

    objs_mod: per-type map of oid -> (object-with-new-state, change map).
    Mutates *records* and *touched_objs* in place.
    NOTE(review): keys of objs_mod are used as tp_obj handles here while
    __add_new indexes by tp_obj.name — confirm callers key both the same way.
    (Idiom fix: `obj != None` replaced with `obj is not None`.)
    """
    for tp_obj in objs_mod:
        if tp_obj.name not in self.object_map:
            continue
        for oid, obj_and_change in objs_mod[tp_obj].items():
            obj, change = obj_and_change
            if oid not in self.object_map[tp_obj.name]:
                # Treat as a new object
                # Not sure what to do.
                pass
            elif obj is not None:
                # Merge the incoming state into the tracked instance.
                self.object_map[tp_obj.name][oid].__dict__.rec_update(
                    obj.__dict__)
            touched_objs.setdefault(tp_obj.group_key,
                                    RecursiveDictionary())[oid] = change
            if self.propagate_changes:
                records.extend(
                    self.__create_records(Event.Modification, tp_obj, oid,
                                          change, None, True))
def __init__(self):
    """Create an empty change manager; recording starts disabled."""
    # Stores the object references for new, mod, and deleted.
    self.current_buffer = RecursiveDictionary()
    # groupname -> {oid -> proto object representing changes.}
    self.current_record = RecursiveDictionary()
    # tpname -> set of oids already delivered to this side.
    self.known_objects = RecursiveDictionary()
    # groupname -> set of oids purged for good.
    self.deleted_objs = RecursiveDictionary()
    self.queue_manager = QueueManager()
    # __record is a no-op until this is flipped on.
    self.startrecording = False
def __push(self):
    """Serialize buffered changes and POST them to every connected host.

    Each host only receives the group keys it subscribed to, in the wire
    format it negotiated (FORMATS). Optionally gzip-compresses the payload
    and tracks bytes sent when instrumentation is enabled. Clears the
    record and buffer afterwards regardless of per-host failures.
    NOTE(review): in the HTTPError handler `resp` may be unbound if the
    exception was raised before the post call — confirm FORMATS/serialize
    cannot raise HTTPError, or bind resp = None first.
    """
    if self.__disconnected:
        return
    if self.__instrumented:
        self._instruments['bytes sent'] = 0
    changes = self.object_store.get_record()
    for host in self.__host_typemap:
        try:
            DF_CLS, content_type = FORMATS[self.__host_wire_format[host]]
            changes_for_host = DF_CLS()
            # Filter to the group keys this host asked to receive.
            changes_for_host["gc"] = RecursiveDictionary([
                (gck, gc) for gck, gc in changes["gc"].items()
                if gck in self.__host_to_push_groupkey[host]
            ])
            if "types" in changes:
                changes_for_host["types"] = changes["types"]
            dictmsg = changes_for_host.SerializeToString()
            if self.__instrumented:
                self._instruments['bytes sent'] = sys.getsizeof(dictmsg)
            headers = {'content-type': content_type}
            if self.__compress:
                headers['content-encoding'] = 'gzip'
                dictmsg = zlib.compress(dictmsg)
            resp = self.__sessions[host].post(host + "/updated",
                                              data=dictmsg,
                                              headers=headers)
        except TypeError:
            self.logger.exception("error encoding obj. Object: %s",
                                  changes_for_host)
        except HTTPError as exc:
            self.__handle_request_errors(resp, exc)
        except ConnectionError:
            self.logger.exception("Disconnected from host.")
            self.__disconnected = True
            self._stop()
    self.object_store.clear_record()
    self.object_store.clear_buffer()
def __create_records(self, event, tp_obj, oid, obj_changes, full_obj_map,
                     converted=False, fk_type_to=None, original_type=None):
    """Build ChangeRecords for one event, maintaining self.record_obj and
    recursively emitting records for foreign-key referenced objects.

    When converted=True, obj_changes/full_obj_map are already in wire
    format and only FK extraction is performed; otherwise dims are
    serialized via __generate_dim and discovered fks are processed.
    """
    # --- Maintain the authoritative per-(group, oid) record cache. ---
    if event == Event.Delete:
        if tp_obj.group_key in self.record_obj and oid in self.record_obj[
                tp_obj.group_key]:
            del self.record_obj[tp_obj.group_key][oid]
    elif event == Event.New:
        self.record_obj.setdefault(
            tp_obj.group_key, RecursiveDictionary()).setdefault(
                oid, RecursiveDictionary()).rec_update(full_obj_map)
    elif event == Event.Modification:
        if tp_obj.group_key in self.record_obj and oid in self.record_obj[
                tp_obj.group_key] and obj_changes:
            self.record_obj[tp_obj.group_key][oid].rec_update(obj_changes)
        else:
            # No cached record: reconstruct the full dim map from the
            # tracked object (falling back to original_type's map).
            if full_obj_map == None:
                try:
                    full_obj_map = ObjectManager.__convert_to_dim_map(
                        self.object_map[tp_obj.name][oid])
                except:
                    if original_type:
                        full_obj_map = ObjectManager.__convert_to_dim_map(
                            self.object_map[original_type.name][oid])
                    else:
                        raise TypeError(
                            "Unknown error. Trying to modify an object that is weirdly tracked by dataframe? It is tracked by the dataframe, but is not in the database"
                        )
            self.record_obj.setdefault(
                tp_obj.group_key, RecursiveDictionary()).setdefault(
                    oid, RecursiveDictionary()).rec_update(full_obj_map)
    # Fall back to the cached record when no full map was supplied.
    if not full_obj_map and event != Event.Delete and not converted:
        full_obj_map = self.record_obj[tp_obj.group_key][oid]
    records = list()
    fks = list()
    new_obj_changes = RecursiveDictionary()
    new_full_obj_map = RecursiveDictionary()
    if converted:
        # Already wire-format: emit directly, then recurse on FK dims.
        records.append(
            ChangeRecord(event, tp_obj, oid, obj_changes, full_obj_map,
                         fk_type_to))
        if obj_changes:
            for k, v in obj_changes.items():
                if v["type"] == Record.FOREIGN_KEY:
                    fk = v["value"]["object_key"]
                    fk_event = Event.Modification if v["value"][
                        "group_key"] in self.object_map and fk in self.object_map[
                            v["value"]["group_key"]] else Event.New
                    fk_type_obj = self.type_manager.get_requested_type_from_str(
                        v["value"]["actual_type"]["name"])
                    fk_full_obj = self.__convert_to_dim_map(
                        self.object_map[fk_type_obj.group_key][fk])
                    # NOTE(review): fk_dims has no default here (unlike the
                    # fks loop below) — if this condition is False, the
                    # recursive call would raise NameError. Confirm intent.
                    if fk_event == Event.New and fk_type_obj.group_key in self.object_map and fk in self.object_map[
                            fk_type_obj.group_key]:
                        fk_dims = fk_full_obj
                    records.extend(
                        self.__create_records(fk_event, fk_type_obj, fk,
                                              fk_dims, fk_full_obj,
                                              fk_type_to=tp_obj))
        if full_obj_map:
            for k, v in full_obj_map.items():
                if v["type"] == Record.FOREIGN_KEY:
                    fk = v["value"]["object_key"]
                    fk_event = Event.Modification if v["value"][
                        "group_key"] in self.object_map and fk in self.object_map[
                            v["value"]["group_key"]] else Event.New
                    fk_type_obj = self.type_manager.get_requested_type_from_str(
                        v["value"]["actual_type"]["name"])
                    fk_full_obj = self.__convert_to_dim_map(
                        self.object_map[fk_type_obj.group_key][fk])
                    # NOTE(review): same missing fk_dims default as above.
                    if fk_event == Event.New and fk_type_obj.group_key in self.object_map and fk in self.object_map[
                            fk_type_obj.group_key]:
                        fk_dims = fk_full_obj
                    records.extend(
                        self.__create_records(fk_event, fk_type_obj, fk,
                                              fk_dims, fk_full_obj,
                                              fk_type_to=tp_obj))
        return records
    # --- Serialize dims into wire format, collecting fks on the way. ---
    if obj_changes:
        for k, v in obj_changes.items():
            if not hasattr(k, "_name"):
                new_obj_changes[k] = v
            else:
                new_obj_changes[k._name] = self.__generate_dim(
                    v, fks, set())
    if full_obj_map:
        if full_obj_map == obj_changes:
            new_full_obj_map = new_obj_changes
        else:
            for k, v in full_obj_map.items():
                if type(k) == str:
                    new_full_obj_map[k] = v
                else:
                    new_full_obj_map[k._name] = self.__generate_dim(
                        v, fks, set())
    # Emit records for each foreign-key object discovered above.
    for fk, fk_type_obj, fk_obj in fks:
        group = fk_type_obj.group_key
        fk_event_type = Event.Modification if group in self.object_map and fk in self.object_map[
            group] else Event.New
        fk_dims = None
        fk_full_obj = self.__convert_to_dim_map(fk_obj)
        if fk_event_type == Event.New and group in self.object_map and fk in self.object_map[
                group]:
            fk_dims = fk_full_obj
        records.extend(
            self.__create_records(fk_event_type, fk_type_obj, fk, fk_dims,
                                  fk_full_obj, fk_type_to=tp_obj))
    records.append(
        ChangeRecord(event, tp_obj, oid, new_obj_changes, new_full_obj_map,
                     fk_type_to))
    return records
def register_app(self, app, types_allowed, types_extra):
    """Register *app* and initialize per-type tracking caches.

    types_allowed is stored as the app's permission set; caches are reset
    for the union of allowed and extra types.
    """
    self.__app_allowed_types[app] = types_allowed
    self.__app_data[app] = RecursiveDictionary()
    all_type_names = types_allowed.union(types_extra)
    for tpname in all_type_names:
        self.reset_cache_for_type(app, tpname)
def __process_record(self, record, full_record):
    """Deserialize one wire-format record into a Python value.

    record: {"type": Record.<KIND>, "value": ...}; full_record is the
    complete incoming change map, consulted to build foreign-key targets
    that do not exist locally yet. Returns the deserialized value, or
    None for NULL / unknown foreign-key groups.
    """
    if record["type"] == Record.INT:
        # the value will be in record["value"]
        # (Python 2: coerce to long to avoid overflow on large ints.)
        return long(record["value"])
    if record["type"] == Record.FLOAT:
        # the value will be in record["value"]
        return float(record["value"])
    if record["type"] == Record.STRING:
        # the value will be in record["value"]
        return record["value"]
    if record["type"] == Record.BOOL:
        # the value will be in record["value"]
        return record["value"]
    if record["type"] == Record.NULL:
        # No value, just make it None
        return None
    if record["type"] == Record.OBJECT:
        # The value is {
        #    "omap": <Dictionary Record form of the object (__dict__)>,
        #    "type": {"name": <name of type>, "type_pickled": pickled string of type <- optional part
        # }
        # So parse it like a dict and update the object dict
        new_record = RecursiveDictionary()
        new_record["type"] = Record.DICTIONARY
        new_record["value"] = record["value"]["omap"]
        dict_value = self.__process_record(new_record, full_record)
        value = self.__create_fake_class()()
        # Set type of object from record.value.object.type. Future work.
        value.__dict__ = dict_value
        return value
    if record["type"] == Record.COLLECTION:
        # Assume it is list, as again, don't know this type
        # value is just list of records
        return [
            self.__process_record(rec, full_record)
            for rec in record["value"]
        ]
    if record["type"] == Record.DICTIONARY:
        # Assume it is dictionary, as again, don't know this type
        # value-> [{"k": key_record, "v": val_record}] Has to be a list because key's may not be string
        return RecursiveDictionary([
            (self.__process_record(p["k"], full_record),
             self.__process_record(p["v"], full_record))
            for p in record["value"]
        ])
    if record["type"] == Record.FOREIGN_KEY:
        # value -> {"group_key": group key,
        #           "actual_type": {"name": type name, "type_pickled": pickle form of type},
        #           "object_key": object key}
        groupname = record["value"]["group_key"]
        oid = record["value"]["object_key"]
        name2type = self.type_manager.get_name2type_map()
        if groupname not in name2type:
            # This type cannot be created, it is not registered with the DataframeModes
            return None
        actual_type_name = (record["value"]["actual_type"]["name"]
                            if "actual_type" in record["value"]
                            and "name" in record["value"]["actual_type"]
                            else groupname)
        # Fall back to the group type when the actual type is unregistered.
        actual_type_name, actual_type = (
            (actual_type_name,
             self.type_manager.get_name2type_map()[actual_type_name].type)
            if (actual_type_name in self.type_manager.get_name2type_map())
            else (groupname,
                  self.type_manager.get_name2type_map()[groupname].type))
        if groupname in self.current_state and oid in self.current_state[
                groupname]:
            # The object exists in one form or the other.
            if actual_type_name in self.object_map and oid in self.object_map[
                    actual_type_name]:
                # If the object already exists. Any new object will update that.
                return self.object_map[actual_type_name][oid]
            # The group object exists, but not in the actual_type obj.
        # The object does not exist, create a dummy one and the actual object will get updated
        # in some other group change in this iteration.
        if groupname in full_record and oid in full_record[
                groupname] and actual_type_name in full_record[groupname][
                    oid]["types"] and full_record[groupname][oid]["types"][
                        actual_type_name] == Event.New:
            # Object is in the incoming record. Can build that. Duplicates will not be built anyway.
            obj, _ = self.__build_dimension_obj(
                full_record[groupname][oid]["dims"],
                self.type_manager.get_requested_type_from_str(groupname),
                full_record)
            obj_state = self.current_state.setdefault(
                groupname, RecursiveDictionary()).setdefault(
                    oid, RecursiveDictionary())
            obj_state.rec_update(obj.__dict__)
            obj.__dict__ = obj_state
            obj.__class__ = actual_type
            self.object_map.setdefault(actual_type_name,
                                       RecursiveDictionary())[oid] = obj
            return obj
    # NOTE(review): message is never %-formatted — likely meant
    # "Do not know dimension type %s" % record["type"]. Also reached when a
    # FOREIGN_KEY target cannot be resolved, where the message is misleading.
    raise TypeError("Do not know dimension type %s", record["type"])
class ApplicationQueue(object):
    """Per-application view over a dataframe's change stream.

    Drains raw ChangeRecords from the queue, merges them into a buffered
    current_record, folds in recomputed impure (PCC) types, and emits a
    serializable DataframeChanges message.
    """

    def __init__(self, name, types, dataframe):
        # App identity and the type set this app observes.
        self.app_name = name
        self.known_objects = RecursiveDictionary()
        self.current_record = RecursiveDictionary()
        self.types = types
        self.dataframe = dataframe
        # Registering hands back the impure types plus this app's queue.
        self.registered_impures, self.queue = self.dataframe.connect_app_queue(self)
        self.lock = RLock()
        # The first fetch pulls all types, not just registered impures.
        self.first_run = True

    def merge_records(self, records):
        """Merge raw ChangeRecords into current_record under the lock.

        A buffered Delete for a type is final; Modifications to unknown
        objects are upgraded to New carrying the full object.
        """
        with self.lock:
            for rec in records:
                event, tpname, groupname, oid, dim_change, full_obj = (
                    rec.event, rec.tpname, rec.groupname, rec.oid,
                    rec.dim_change, rec.full_obj)
                obj_changes = self.current_record.setdefault(
                    groupname, RecursiveDictionary()).setdefault(
                        oid, RecursiveDictionary())
                type_changes = obj_changes.setdefault("types",
                                                      RecursiveDictionary())
                if tpname in type_changes and type_changes[tpname] == Event.Delete:
                    continue
                is_known = tpname in self.known_objects and oid in self.known_objects[tpname]
                if event == Event.New:
                    type_changes[tpname] = event
                    obj_changes.setdefault(
                        "dims", RecursiveDictionary()).rec_update(full_obj)
                elif event == Event.Modification:
                    type_changes[tpname] = event if is_known else Event.New
                    change = dim_change if is_known else full_obj
                    if change:
                        obj_changes.setdefault(
                            "dims", RecursiveDictionary()).rec_update(change)
                elif event == Event.Delete:
                    type_changes[tpname] = event

    def get_record(self):
        """Drain the queue, merge impure results, and return the message."""
        records = list()
        while True:
            try:
                records.extend(self.queue.get_nowait())
            except Empty:
                # Queue drained: merge everything gathered this pass.
                self.merge_records(records)
                records = list()
                break
        objmap = self.fetch_impure_types()
        return ApplicationQueue.__convert_to_serializable_dict(
            self.set_known_objects(
                self.merge_impure_record(self.current_record, objmap)))

    def clear_record(self):
        """Drop the buffered record, starting a fresh batch."""
        self.current_record = RecursiveDictionary()

    def fetch_impure_types(self):
        """Fetch current members for impure types (all types on first run)."""
        objmap = RecursiveDictionary()
        for tp in (self.registered_impures if not self.first_run else self.types):
            objmap[tp] = self.dataframe.get(tp)
        self.first_run = False
        return objmap

    def merge_impure_record(self, current_record, results):
        """Fold recomputed impure-type results into *current_record*.

        Objects known before but absent from *results* become deletions.
        Buffered Delete/Modification events take precedence over merges.
        """
        deleted = RecursiveDictionary()
        for tp in self.registered_impures:
            tpname = tp.__realname__
            obj_oids = self.known_objects[tpname] if tpname in self.known_objects else set()
            next_oids = set([obj.__primarykey__ for obj in results[tp]]) if tp in results else set()
            deleted_oids = obj_oids.difference(next_oids)
            deleted[tpname] = deleted_oids
        impure_results = self.dataframe.convert_to_record(results, deleted)
        for group_name, grpchanges in impure_results.items():
            if group_name not in current_record:
                current_record[group_name] = grpchanges
                continue
            for oid, obj_changes in grpchanges.items():
                if oid not in current_record[group_name]:
                    current_record[group_name][oid] = obj_changes
                    continue
                for tpname, event in obj_changes["types"].items():
                    if tpname in current_record[group_name][oid]["types"]:
                        existing_event = current_record[group_name][oid]["types"][tpname]
                    else:
                        existing_event = event
                    if existing_event == Event.Delete or existing_event == Event.Modification:
                        continue
                    current_record[group_name][oid].setdefault(
                        "dims", RecursiveDictionary()).rec_update(obj_changes["dims"])
                    current_record[group_name][oid]["types"][tpname] = existing_event
        return current_record

    def set_known_objects(self, current_record):
        """Update known_objects from the record's New/Delete events.

        Returns current_record unchanged, for call chaining.
        """
        for groupname, grp_changes in current_record.items():
            for oid, obj_changes in grp_changes.items():
                for tpname, status in obj_changes["types"].items():
                    if status == Event.New:
                        self.known_objects.setdefault(tpname, set()).add(oid)
                    elif status == Event.Delete:
                        self.known_objects[tpname].remove(oid)
        return current_record

    @staticmethod
    def __convert_to_serializable_dict(current_record):
        """Wrap *current_record* in a DataframeChanges message."""
        df_changes = df_repr.DataframeChanges_Base()
        df_changes.ParseFromDict({"gc": current_record})
        return df_changes
def clear_record(self):
    """Discard all buffered change records, starting a fresh batch."""
    self.current_record = RecursiveDictionary()
def fetch_impure_types(self):
    """Fetch current members for each impure type from the dataframe.

    On the very first call all observed types are fetched; afterwards only
    the registered impure types are refreshed.
    """
    source_types = self.types if self.first_run else self.registered_impures
    result = RecursiveDictionary()
    for tp in source_types:
        result[tp] = self.dataframe.get(tp)
    self.first_run = False
    return result
def __convert_to_dim_map(obj):
    """Map each dimension descriptor of *obj* to its current value.

    Dimensions the object has not set are skipped. Keys are the dimension
    descriptors themselves (not their names).
    """
    dim_map = RecursiveDictionary()
    for dim in obj.__dimensions__:
        if hasattr(obj, dim._name):
            dim_map[dim] = getattr(obj, dim._name)
    return dim_map
def __init__(self):
    """Initialize per-app bookkeeping and a module-scoped logger."""
    self.logger = logging.getLogger(__name__)
    # app -> per-type tracking caches.
    self.__app_data = RecursiveDictionary()
    # app -> set of type names the app may observe.
    self.__app_allowed_types = {}
class ChangeManager(object):
    """Buffers change records per (group, oid) and fans them out to
    registered application queues.

    current_record layout: groupname -> oid ->
        {"types": {tpname: Event}, "dims": {dim: serialized record}}.
    """

    def __init__(self):
        # Stores the object references for new, mod, and deleted.
        self.current_buffer = RecursiveDictionary()
        # groupname -> {oid -> proto object representing changes.}
        self.current_record = RecursiveDictionary()
        self.known_objects = RecursiveDictionary()
        # groupname -> set of oids purged for good.
        self.deleted_objs = RecursiveDictionary()
        self.queue_manager = QueueManager()
        # __record is a no-op until this is flipped on.
        self.startrecording = False

    #################################################
    ### Static Methods ##############################
    #################################################

    #################################################
    ### API Methods #################################
    #################################################

    def report_dim_modification(self, records):
        """Record dimension-change events, honoring projection flags."""
        for record in records:
            self.__record(record.event, record.tpname, record.groupname,
                          record.oid, record.dim_change, record.full_obj,
                          record.is_projection)

    def add_records(self, applied_records, pcc_change_records=None, except_app=None):
        """Record applied (and derived PCC) changes, then notify queues.

        NOTE(review): is_projection is unpacked but not forwarded to
        __record here, while report_dim_modification does forward it —
        confirm whether that asymmetry is intentional.
        """
        records = (applied_records + pcc_change_records) if pcc_change_records else applied_records
        for rec in records:
            event, tpname, groupname, oid, dim_change, full_dim_map, is_projection = (
                rec.event, rec.tpname, rec.groupname, rec.oid,
                rec.dim_change, rec.full_obj, rec.is_projection)
            self.__record(event, tpname, groupname, oid, dim_change, full_dim_map)
        self.__send_to_queues(applied_records, pcc_change_records, except_app)

    def add_changelog(self, changes):
        """No-op hook for changelog integration."""
        pass

    def get_record(self):
        """Return the buffered changes as a serializable message."""
        return self.convert_to_serializable_dict(self.current_record)

    def add_app_queue(self, app_queue):
        """Register an application queue with the queue manager."""
        return self.queue_manager.add_app_queue(app_queue)

    def build_change_map(self, records):
        # NOTE(review): appears unfinished — allocates a dict and
        # implicitly returns None.
        the_dict = RecursiveDictionary()

    def convert_to_serializable_dict(self, current_record):
        """Wrap *current_record* in a DataframeChanges message."""
        df_changes = df_repr.DataframeChanges_Base()
        df_changes.ParseFromDict({"gc": current_record})
        return df_changes

    def clear_record(self):
        """Drop all buffered change records."""
        self.current_record = RecursiveDictionary()

    #################################################
    ### Private Methods #############################
    #################################################

    def __record_objs_to_dict(self, the_dict, tpname, groupname, oid, full_obj_map):
        """Record *oid* as a New object of *tpname* in *the_dict*."""
        objmap = the_dict.setdefault(
            groupname, RecursiveDictionary()).setdefault(
                oid, RecursiveDictionary())
        objmap.setdefault("types", RecursiveDictionary())[tpname] = Event.New
        # BUG FIX: was objmap.setdefaykt(...) — an AttributeError whenever
        # this method ran.
        objmap.setdefault("dims", RecursiveDictionary()).rec_update(full_obj_map)

    def __record(self, event_type, tpname, groupname, oid, dim_change, full_dim_map, is_projection=False):
        """Fold one change event into current_record (see class docstring)."""
        if not self.startrecording:
            return
        if event_type == Event.Delete and tpname == groupname:
            # it is its own key. Which means the obj is being deleted for good.
            # Purge all changes.
            if groupname in self.current_record and oid in self.current_record[groupname]:
                if "dims" in self.current_record[groupname][oid]:
                    del self.current_record[groupname][oid]["dims"]
                for tp in self.current_record[groupname][oid]["types"]:
                    self.current_record[groupname][oid]["types"][tp] = Event.Delete
            self.deleted_objs.setdefault(groupname, set()).add(oid)
        if event_type != Event.Delete and tpname in self.deleted_objs and oid in self.deleted_objs[tpname]:
            # This object is flagged for deletion. Throw this change away.
            return
        # New projections are recorded under the group name; everything
        # else under the concrete type name.
        self.current_record.setdefault(
            groupname, RecursiveDictionary()).setdefault(
                oid, RecursiveDictionary({"types": RecursiveDictionary()}))["types"].rec_update(
                    RecursiveDictionary({
                        (groupname if event_type == Event.New and is_projection
                         else tpname): event_type}))
        if dim_change:
            # (Removed unused local `fks = []` from the original.)
            dims = self.current_record[groupname][oid].setdefault(
                "dims", RecursiveDictionary())
            dims.rec_update(dim_change)

    def __send_to_queues(self, applied_records, pcc_change_records, except_app=None):
        """Forward the raw records to every registered app queue."""
        self.queue_manager.add_records(applied_records, pcc_change_records,
                                       except_app)
def build_change_map(self, records):
    # NOTE(review): appears unfinished — allocates a dict, never fills it,
    # and implicitly returns None. Confirm whether this is a stub.
    the_dict = RecursiveDictionary()
def __record_objs_to_dict(self, the_dict, tpname, groupname, oid, full_obj_map):
    """Record *oid* as a newly created object of *tpname* in *the_dict*.

    Ensures the_dict[groupname][oid] exists, marks the type entry as
    Event.New, and merges the full dimension map into "dims".
    """
    objmap = the_dict.setdefault(
        groupname, RecursiveDictionary()).setdefault(
            oid, RecursiveDictionary())
    objmap.setdefault("types", RecursiveDictionary())[tpname] = Event.New
    # BUG FIX: was objmap.setdefaykt("dims", ...) — a typo that raised
    # AttributeError whenever this method ran.
    objmap.setdefault("dims", RecursiveDictionary()).rec_update(full_obj_map)