def _validate_building_data(self, b_dict):
    """
    Check that a dictionary of building information is valid for an update.

    Validates the presence and format of b_id and/or l_b_id. When b_id is
    invalid but l_b_id is valid, the legacy id is promoted to b_id so the
    building does not get discarded.

    Arguments:
    - b_dict: a dictionary representing a building

    Return value: True if data is valid, False otherwise.
    """
    main_id = b_dict.get("b_id", "")
    legacy_id = b_dict.get("l_b_id", "")

    # The common case: the primary id is already valid.
    if Building.is_valid_bid(main_id):
        return True

    # No usable id at all: report and reject the building.
    if not Building.is_valid_bid(legacy_id):
        Logger.error(
            "Building discarded:",
            "Invalid building id", main_id,
            "and no valid legacy id is present"
        )
        return False

    # Fall back on the legacy id, keeping the building alive.
    Logger.warning(
        "Invalid building id: \"{}\"".format(main_id),
        "- legacy id", legacy_id,
        "will be used instead."
    )
    b_dict["b_id"] = legacy_id
    return True
def _validate_building_data(self, b_dict):
    """
    Ensure a dictionary containing building information is valid for an
    update.

    The presence and format of b_id and/or l_b_id are validated. If b_id
    is not valid but l_b_id is, the legacy id is copied into b_id so the
    building is not discarded.

    Arguments:
    - b_dict: a dictionary representing a building

    Return value: True if data is valid, False otherwise.
    """
    bid = b_dict.get("b_id", "")
    if Building.is_valid_bid(bid):
        return True

    lbid = b_dict.get("l_b_id", "")
    legacy_ok = Building.is_valid_bid(lbid)
    if legacy_ok:
        # Recoverable: substitute the valid legacy id for the broken one.
        Logger.warning(
            "Invalid building id: \"{}\"".format(bid),
            "- legacy id", lbid,
            "will be used instead."
        )
        b_dict["b_id"] = lbid
    else:
        # Neither id is usable: the caller should drop this building.
        Logger.error(
            "Building discarded:",
            "Invalid building id", bid,
            "and no valid legacy id is present"
        )
    return legacy_ok
def find_building_to_update(self, building_dict):
    """
    Find on the database or create a Building object to be updated with
    the information contained in building_dict.

    Arguments:
    - building_dict: a dictionary containing the new values to be
      inserted on the building. Must contain "b_id"; may optionally
      contain a legacy id under "l_b_id".

    Returns a Building object.

    This implementation ensures that a document saved with the legacy
    building id gets incorporated into the current building object before
    it is returned; the legacy document is destroyed only after the merged
    building is successfully saved. If no legacy building is present in
    the database, the default behaviour applies: an existing Building is
    returned or a new one is created.
    """
    b_id = building_dict["b_id"]
    building = Building.find_or_create_by_id(b_id)

    # Nothing to do if a b_id <-> l_b_id mapping was already recorded.
    if "merged" in building and building["merged"].get("l_b_id", None):
        return building

    # Fix: use .get() so a missing "l_b_id" key does not raise KeyError
    # (the validation step also treats the key as optional).
    l_b_id = building_dict.get("l_b_id", "")
    if not Building.is_valid_bid(l_b_id):
        return building

    to_merge = Building.find(l_b_id)
    if to_merge is None:
        return building

    # A building saved under the legacy id exists: adopt its DXF data.
    building.attr("dxf", to_merge.attr("dxf"))

    def before_callback(b):
        DXFRoomIdsResolver.resolve_rooms_id(b, None, "edilizia")

        # Ensure floor merging is performed AFTER DXF room id resolution.
        merged = b.attributes_for_source("merged")
        merged["floors"] = DataMerger.merge_floors(
            b.get("edilizia"),
            b.get("easyroom"),
            b.get("dxf")
        )

    building.listen_once("before_save", before_callback)
    # Remove the legacy document only once the merged building is saved.
    building.listen_once("after_save", lambda b: to_merge.destroy())
    return building
def find_building_to_update(self, building_dict):
    """
    Find on the database or create a Building object to be updated with
    the information contained in building_dict.

    Arguments:
    - building_dict: a dictionary containing the new values to be
      inserted on the building. Must contain "b_id"; may optionally
      contain a legacy id under "l_b_id".

    Returns a Building object.

    This implementation ensures that a document saved with the legacy
    building id gets incorporated into the current building object before
    it is returned; the legacy document is destroyed only after the merged
    building is saved. If no legacy building is present in the database,
    the default behaviour applies: an existing Building is returned or a
    new one is created.
    """
    b_id = building_dict["b_id"]
    building = Building.find_or_create_by_id(b_id)

    # Only attempt the merge when no b_id <-> l_b_id mapping is recorded yet.
    if "merged" not in building or not building["merged"].get("l_b_id", None):
        # Fix: .get() avoids a KeyError when the source carries no legacy id.
        l_b_id = building_dict.get("l_b_id", "")
        if not Building.is_valid_bid(l_b_id):
            return building

        to_merge = Building.find(l_b_id)
        if to_merge is not None:
            # Found a building stored under the legacy id: take its DXF data.
            building.attr("dxf", to_merge.attr("dxf"))

            def before_callback(b):
                DXFRoomIdsResolver.resolve_rooms_id(b, None, "edilizia")
                # Ensure floor merging is performed AFTER DXF room id
                # resolution.
                merged = b.attributes_for_source("merged")
                merged["floors"] = DataMerger.merge_floors(
                    b.get("edilizia"),
                    b.get("easyroom"),
                    b.get("dxf")
                )

            building.listen_once("before_save", before_callback)
            # The legacy document is deleted only after a successful save.
            building.listen_once("after_save", lambda b: to_merge.destroy())

    return building
def api_get_building_by_id(b_id):
    """
    <h3>/buildings/<em>b_id</em></h3>
    <p>Returns the building with the specified b_id .</p>
    <h5>Parameters</h5>
    <p><em>b_id[string]</em> : a valid b_id</p>
    """
    if not Building.is_valid_bid(b_id):
        abort(400)

    building = app.buildings.find_one({'_id': b_id})
    if not building:
        # Fix: the original replaced a missing building with [] and then
        # crashed on building['floors'] (HTTP 500); answer 404 instead.
        abort(404)

    # Attach the map URL to every floor of the building.
    for floor in building['floors']:
        floor['map'] = maps_url(b_id, floor['f_id'])

    building = prepare_building_for_api(building)
    return jsonify(building)
def update_rooms(self, rooms):
    """
    Perform an update of room data on Database.

    Arguments:
    - rooms: a list of dictionaries representing a room data.

    Does not return (None).

    Example of a room retrieved from an Edilizia csv file:
    {
        'room_name' : 'Aula Seminari',
        'cat_name'  : 'Aula',
        'r_id'      : 'T065',
        'b_id'      : '11010',
        'capacity'  : '52',
        'l_floor'   : 'T'
    }

    The b_id field is used to locate the associated building on the
    database. If it is found, it is updated with the information,
    otherwise a new building is created.

    Note that for each building that gets updated, its floors are emptied
    before adding the rooms information. Hence no partial updates are
    possible: every time this method is called, it must receive the final
    list of rooms for each updated floor.
    """
    # One update date shared by every building touched in this batch.
    self.batch_date = datetime.now()

    # Sort by building then floor so groupby works and per-floor
    # processing is faster.
    rooms.sort(key=lambda room: (room["b_id"], room["l_floor"]))

    # Process one building at a time.
    for b_id, building_rooms in groupby(rooms, key=lambda room: room["b_id"]):
        # Skip the whole group when the building id is not valid.
        if not Building.is_valid_bid(b_id):
            Logger.error(
                "Invalid building id: \"{}\".".format(b_id),
                "Rooms discarded:",
                ", ".join(r["r_id"] for r in building_rooms)
            )
            continue

        building = Building.find_or_create_by_id(b_id)

        # Main update work: replace the building's rooms with the new data.
        self.replace_building_rooms(building, building_rooms)

        # NOTE(review): this looks like merge policy (it combines several
        # sources to resolve DXF room ids) and may belong on the Building
        # model rather than in this updater.
        DXFRoomIdsResolver.resolve_rooms_id(building, None, self.get_namespace())

        # Ensure floor merging is performed AFTER DXF room id resolution.
        merged = building.attributes_for_source("merged")
        merged["floors"] = DataMerger.merge_floors(
            building.get("edilizia"),
            building.get("easyroom"),
            building.get("dxf")
        )
        building.save()
def update_rooms(self, rooms):
    """
    Perform an update of room data on Database.

    Arguments:
    - rooms: a list of dictionaries representing a room data.

    Does not return (None).

    Example of a room retrieved from an Edilizia csv file:
    {
        'room_name' : 'Aula Seminari',
        'cat_name'  : 'Aula',
        'r_id'      : 'T065',
        'b_id'      : '11010',
        'capacity'  : '52',
        'l_floor'   : 'T'
    }

    The b_id field is used to locate the associated building on the
    database; the building is updated if found, created otherwise.

    Note that for each building that gets updated, its floors are emptied
    before adding the rooms information, so no partial updates are
    possible: every call must supply the final list of rooms for each
    updated floor.
    """
    # Record a single batch-wide update timestamp.
    self.batch_date = datetime.now()

    # Pre-sort by (building, floor): required by groupby and speeds up
    # the downstream per-floor work.
    rooms.sort(key=lambda room: (room["b_id"], room["l_floor"]))

    for b_id, group in groupby(rooms, key=lambda room: room["b_id"]):
        if Building.is_valid_bid(b_id):
            target = Building.find_or_create_by_id(b_id)

            # Core update: swap in the new room data for this building.
            self.replace_building_rooms(target, group)

            # NOTE(review): resolving DXF room ids from multiple sources
            # resembles merge policy; it may fit better on the Building
            # model than here.
            DXFRoomIdsResolver.resolve_rooms_id(target, None, self.get_namespace())

            # Floor merging must happen AFTER DXF room id resolution.
            merged = target.attributes_for_source("merged")
            merged["floors"] = DataMerger.merge_floors(
                target.get("edilizia"),
                target.get("easyroom"),
                target.get("dxf")
            )
            target.save()
        else:
            # Invalid building id: drop every room in this group.
            Logger.error(
                "Invalid building id: \"{}\".".format(b_id),
                "Rooms discarded:",
                ", ".join(room["r_id"] for room in group)
            )