def _validate_building_data(self, b_dict):
    """
    Check that a building dictionary is usable for an update.

    Validates b_id and, when it is missing or malformed, falls back to a
    valid l_b_id (copying it into b_dict["b_id"]) so the building is not
    discarded.

    Arguments:
    - b_dict: a dictionary representing a building

    Return value: True if data is valid, False otherwise
    """
    main_id = b_dict.get("b_id", "")
    legacy_id = b_dict.get("l_b_id", "")

    # A well-formed primary id needs no further work.
    if Building.is_valid_bid(main_id):
        return True

    # No usable primary id: the legacy id is the only way to keep it.
    if not Building.is_valid_bid(legacy_id):
        Logger.error(
            "Building discarded:",
            "Invalid building id", main_id,
            "and no valid legacy id is present"
        )
        return False

    Logger.warning(
        "Invalid building id: \"{}\"".format(main_id),
        "- legacy id", legacy_id,
        "will be used instead."
    )
    # Promote the legacy id in place so callers see a usable b_id.
    b_dict["b_id"] = legacy_id
    return True
def full_scan():
    """Mark every building listed in rent_control.geojson as rent-controlled."""
    set_log(os.path.join("logs", "txt", "rent_control.txt"))
    log("Scanning rent_control.geojson", important=True)
    feature_list = read(os.path.join("scrapers", "data", "rent_control.geojson"), isjson=True)["features"]
    props = [feat["properties"] for feat in feature_list if feat["properties"]["address"]]
    log("using %s of %s rows (omitting blank entries)"%(len(props), len(feature_list)), 1)
    to_save = []
    seen_addrs = set()
    for prop in props:
        addr = prop["address"]
        if addr in seen_addrs:
            continue  # one datastore entity per distinct address
        seen_addrs.add(addr)
        entity = Building.query(Building.address == addr).get()
        if not entity:
            log("Can't find '%s' -- creating new entry"%(addr,), 2)
            entity = Building(address=addr)
        entity.rent_control = True
        to_save.append(entity)
    log("saving %s rent-control buildings to db"%(len(to_save),), 1)
    db.put_multi(to_save)
    log("goodbye")
    close_log()
def test_remove_untouched_keys(self):
    """remove_untouched_keys must issue the expected multi-update per namespace."""
    batch_date = datetime.now()
    for i, namespace in enumerate(("edilizia", "easyroom")):
        if i:
            # Clear the calls recorded for the previous namespace.
            self.pm.reset_mock()
        Building.remove_untouched_keys(namespace, batch_date)
        expected_query = {
            namespace: {"$exists": True},
            namespace + ".updated_at": {"$lt": batch_date}
        }
        expected_action = {
            "$unset": {namespace: ""},
            "$set": {"deleted_" + namespace: batch_date}
        }
        self.pm.update.assert_called_once_with(
            "building", expected_query, expected_action, {"multi": True}
        )
def test_remove_deleted_buildings(self):
    """Deleted buildings and their stale view documents must both be removed."""
    collection = Building.collection_name()
    Building.remove_deleted_buildings()
    opts = {"multi": True}
    # Buildings with one source absent and the other logically deleted.
    orphan_query = {
        "$or": [
            {"edilizia": {"$exists": False}, "deleted_easyroom": {"$exists": True}},
            {"easyroom": {"$exists": False}, "deleted_edilizia": {"$exists": True}},
        ]
    }
    self.assertEqual(self.pm.remove.call_count, 2)
    surviving = Building._pm.get_collection_ids(collection)
    # The view cache must drop every id no longer in the main collection.
    view_query = {"_id": {"$nin": list(surviving)}}
    expected = [
        call(collection, orphan_query, opts),
        call("buildingview", view_query, opts),
    ]
    self.pm.remove.assert_has_calls(expected, any_order=True)
def _validate_building_data(self, b_dict):
    """
    Ensure a dictionary containing building information is actually valid
    for updating purposes.

    The main goal is to validate the presence and format of b_id and/or
    l_b_id. If no b_id is present but a l_b_id is valid, it is set as
    current b_id, which ensures the building does not get discarded.

    Arguments:
    - b_dict: a dictionary representing a building

    Return value: True if data is valid, False otherwise
    """
    # Missing keys default to "", which is_valid_bid treats as invalid.
    b_id = b_dict.get("b_id", "")
    l_b_id = b_dict.get("l_b_id", "")
    if not Building.is_valid_bid(b_id):
        if Building.is_valid_bid(l_b_id):
            # Recoverable: promote the legacy id to primary id in place.
            Logger.warning("Invalid building id: \"{}\"".format(b_id), "- legacy id", l_b_id, "will be used instead.")
            b_dict["b_id"] = l_b_id
        else:
            # No identifier at all: the caller must discard this building.
            Logger.error("Building discarded:", "Invalid building id", b_id, "and no valid legacy id is present")
            return False
    return True
def test_remove_deleted_buildings(self):
    # setUp installed self.pm (a MagicMock) as Building's persistence
    # manager, so db calls are recorded on the mock instead of executed.
    building_collection = Building.collection_name()
    Building.remove_deleted_buildings()
    # Buildings are physically removed only when one namespace is absent
    # and the other one carries a deleted_<namespace> marker.
    query = {
        "$or": [{
            "edilizia": {"$exists": False},
            "deleted_easyroom": {"$exists": True}
        }, {
            "easyroom": {"$exists": False},
            "deleted_edilizia": {"$exists": True}
        }]
    }
    options = {"multi": True}
    # One remove for the building collection, one for the derived view.
    self.assertEqual(self.pm.remove.call_count, 2)
    valid_ids = Building._pm.get_collection_ids(building_collection)
    # The buildingview cache must drop every id no longer in the collection.
    query_bv = {"_id": {"$nin": list(valid_ids)}}
    call1 = call(building_collection, query, options)
    call2 = call("buildingview", query_bv, options)
    self.pm.remove.assert_has_calls([call1, call2], any_order=True)
def test_find_if_building_not_exists(self):
    """find() on a missing id gives None; find_or_create_by_id builds a fresh record."""
    missing = Building.find(12)
    self.assertEqual(missing, None)
    created = Building.find_or_create_by_id(12)
    # A newly created building is dirty and normalizes the id to a string.
    self.assertEqual(created.is_changed(), True)
    self.assertEqual(created.attr("_id"), "12")
    self.assertEqual(created.attr("b_id"), "12")
def test_find_and_update(self):
    """A saved attribute must be visible when the same id is fetched again."""
    first = Building.find_or_create_by_id(12)
    first.attr("pippo", "paperino")
    first.save()
    # Fetching by the same id must round-trip through the persistence layer.
    second = Building.find_or_create_by_id(12)
    self.assertEqual(first, second)
def test_find_if_building_not_exists(self):
    # Unknown id: find returns None instead of raising.
    b = Building.find(12)
    self.assertEqual(b, None)
    # find_or_create_by_id falls back to creating an in-memory building.
    b = Building.find_or_create_by_id(12)
    # The new object is unsaved ("changed") and stores the id as a string.
    self.assertEqual(b.is_changed(), True)
    self.assertEqual(b.attr("_id"), "12")
    self.assertEqual(b.attr("b_id"), "12")
def test_building_find(self):
    """find() returns None for unknown ids and a clean object for saved ones."""
    self.assertEqual(Building.find(12), None)
    # Seed the fake persistence layer directly, then look the record up.
    self._pm.save("building", {"_id": "123", "pluto": 333})
    found = Building.find(123)
    self.assertEqual(found.attr("pluto"), 333)
    # An object loaded straight from the db starts out unmodified.
    self.assertEqual(found.is_changed(), False)
def test_building_find(self):
    # Nothing saved yet: lookups miss and return None.
    b = Building.find(12)
    self.assertEqual(b, None)
    # Persist a raw document, then find() must hydrate it by id.
    self._pm.save("building", {"_id": "123", "pluto": 333})
    b = Building.find(123)
    self.assertEqual(b.attr("pluto"), 333)
    # Freshly loaded objects carry no pending modifications.
    self.assertEqual(b.is_changed(), False)
def find_building_to_update(self, building_dict):
    """
    Finds on database or create a Buiding object to be updated with
    information contained by building_dict

    Arguments:
    - building_dict: a dictionary containing the new values to be inserted
      on the building.

    Returns a Building object.

    This implementation ensures that documents saved with the legacy
    building id gets incorporated into the current building object, before
    actually returning it. If no legacy building is present in Database,
    the default behaviour is ensured: it either returns an existing
    Building object or creates a new one.
    """
    b_id = building_dict["b_id"]
    building = Building.find_or_create_by_id(b_id)

    # Check that we do not already have a b_id <-> l_b_id mapping
    # (i.e. the legacy building was not merged into this one before).
    if "merged" not in building or not building["merged"].get("l_b_id", None):
        l_b_id = building_dict["l_b_id"]
        if not Building.is_valid_bid(l_b_id):
            # No usable legacy id: nothing to merge, default behaviour.
            return building

        to_merge = Building.find(l_b_id)

        if to_merge is not None:
            # We found a building matching the legacy id: adopt its DXF data.
            #building["dxf"] = to_merge["dxf"]
            building.attr("dxf", to_merge.attr("dxf"))

            def before_callback(b):
                # Re-resolve room ids now that the DXF data changed.
                DXFRoomIdsResolver.resolve_rooms_id(b, None, "edilizia")
                # Ensure floor merging is performed AFTER DXF Room_id resolution
                merged = b.attributes_for_source("merged")
                merged["floors"] = DataMerger.merge_floors(
                    b.get("edilizia"),
                    b.get("easyroom"),
                    b.get("dxf")
                )

            building.listen_once("before_save", before_callback)
            # The legacy document is removed once the merged one is saved.
            building.listen_once("after_save", lambda b: to_merge.destroy())

    return building
def find_building_to_update(self, building_dict):
    """
    Finds on database or create a Buiding object to be updated with
    information contained by building_dict

    Arguments:
    - building_dict: a dictionary containing the new values to be inserted
      on the building.

    Returns a Building object.

    This implementation ensures that documents saved with the legacy
    building id gets incorporated into the current building object, before
    actually returning it. If no legacy building is present in Database,
    the default behaviour is ensured: it either returns an existing
    Building object or creates a new one.
    """
    b_id = building_dict["b_id"]
    building = Building.find_or_create_by_id(b_id)

    # Skip the merge when a b_id <-> l_b_id mapping already exists from a
    # previous run (merged.l_b_id set).
    if "merged" not in building or not building["merged"].get(
            "l_b_id", None):
        l_b_id = building_dict["l_b_id"]
        if not Building.is_valid_bid(l_b_id):
            # No valid legacy id: behave like the plain find-or-create case.
            return building

        to_merge = Building.find(l_b_id)

        if to_merge is not None:
            # A building saved under the legacy id exists: copy its DXF data
            # into the current building before saving.
            #building["dxf"] = to_merge["dxf"]
            building.attr("dxf", to_merge.attr("dxf"))

            def before_callback(b):
                # Room ids must be resolved again against the adopted DXF data.
                DXFRoomIdsResolver.resolve_rooms_id(b, None, "edilizia")
                # Ensure floor merging is performed AFTER DXF Room_id resolution
                merged = b.attributes_for_source("merged")
                merged["floors"] = DataMerger.merge_floors(
                    b.get("edilizia"),
                    b.get("easyroom"),
                    b.get("dxf"))

            building.listen_once("before_save", before_callback)
            # Drop the legacy document only after the merged one is persisted.
            building.listen_once("after_save", lambda b: to_merge.destroy())

    return building
def export_buildings():
    """Upsert every entry of the module-level `buildings` dict into the datastore."""
    pending = []
    entries = buildings.items()
    log("processing %s buildings"%(len(entries),), 1)
    for count, (b_id, bdata) in enumerate(entries, start=1):
        addr = bdata["address"]
        entity = Building.query(Building.address == addr).get()
        owner = Owner.query(Owner.name == bdata["owner"]).get()
        if not entity:
            log("Can't find building '%s' -- creating new entry"%(addr,), 2)
            entity = Building(address=addr)
        # Only overwrite fields for which the source produced a value.
        if owner:
            entity.owner = owner.key
        if bdata["year"]:
            entity.year = bdata["year"]
        if bdata["building_type"]:
            entity.building_type = bdata["building_type"]
        if b_id:
            entity.building_id = b_id
        if not entity.zipcode:
            zc = _zip(bdata["zipcode"], addr)
            if zc:
                entity.zipcode = zc
        # Geocode lazily: only when coordinates are missing.
        if not entity.latitude or not entity.longitude:
            entity.latitude, entity.longitude = address2latlng(entity.address)
        pending.append(entity)
        if not count % 100:
            log("processed %s buildings"%(count,), 2)
    log("saving buildings", 1)
    db.put_multi(pending)
def perform_svg_update(self, b_ids=None):
    """
    Call the perform_maps_update on every building or on a list of
    buildings specified with a list of b_ids.

    Arguments:
    - b_ids: a list of string representing b_ids.

    Returns: None.
    """
    # Only buildings that carry both merged and dxf floor data can be drawn.
    selector = {
        "$and": [
            {"merged.floors": {"$exists": True}},
            {"dxf.floors": {"$exists": True}},
        ]
    }
    if b_ids:
        # Restrict the batch to the explicitly requested ids.
        selector["_id"] = {"$in": b_ids}
    for b in Building.where(selector):
        self.perform_maps_update(b)
def load_soft_story_status(soft_filename):
    """Load buildings from soft story csv into database."""
    print("Soft-Story Buildings")
    with open(soft_filename) as csvfile:
        data = csv.reader(csvfile)
        for row in data:
            # Expected columns: address, status, lat, lng, liquefaction flag.
            address, status, latitude, longitude, liquefaction = row
            # NOTE(review): SoftStory receives the raw csv string for
            # liquefaction; the boolean conversion below happens only after
            # construction and is used just for printing. Confirm whether the
            # model expects a string or a bool (compare load_tall_building,
            # which converts at_risk before constructing the record).
            soft_story = SoftStory(status=status, liquefaction=liquefaction)
            building = Building(address=address,
                                latitude=float(latitude),
                                longitude=float(longitude))
            soft_story.building = building
            liquefaction = liquefaction == "yes"
            print(address)
            print(liquefaction)
            # We need to add to the session or it won't ever be stored
            db.session.add(soft_story)
            print(soft_story)
    # Once we're done, we should commit our work
    db.session.commit()
def prepare_rooms(self, floor_id, rooms):
    """
    Transform a list of rooms in a dictionary indexed by room id.

    Arguments:
    - floor_id: a string representing the floor identifier,
    - rooms: a list of rooms.

    Returns: a dictionary of rooms.

    Rooms whose id fails Building.is_valid_rid are discarded and reported
    with a single summary warning.
    """
    prepared = {}
    invalid_ids = set()
    for room in map(self.sanitize_room, rooms):
        room_id = room["r_id"]
        if not Building.is_valid_rid(room_id):
            invalid_ids.add(room_id)
            continue
        # Translate the human-readable category name into its canonical id.
        if "cat_id" in room:
            room["cat_id"] = RoomCategory.get_cat_id_by_name(room.get("cat_name", ""))
            del room["cat_name"]
        # The id becomes the dictionary key, so drop it from the value.
        del room["r_id"]
        prepared[room_id] = room
    if invalid_ids:
        Logger.warning("Rooms discarded from floor", floor_id,
                       "for having an invalid room id:", ", ".join(invalid_ids))
    return prepared
def prepare_rooms(self, floor_id, rooms):
    """
    Transform a list of rooms in a dictionary indexed by room id.

    Arguments:
    - floor_id: a string representing the floor identifier,
    - rooms: a list of rooms.

    Returns: a dictionary of rooms.

    Validate the r_id using Building.is_valid_rid function and discard
    rooms with invalid id. Create and return a dictionary of validated
    rooms.
    """
    result = {}
    discarded_rooms = set()
    # Each room is normalized by sanitize_room before validation.
    for r in map(self.sanitize_room, rooms):
        if not Building.is_valid_rid(r["r_id"]):
            # Remember the bad id for a single summary warning below.
            discarded_rooms.add(r["r_id"])
            continue
        if "cat_id" in r:
            # Replace the category name with its canonical category id.
            r["cat_id"] = RoomCategory.get_cat_id_by_name(r.get("cat_name", ""))
            del r["cat_name"]
        # The id becomes the dictionary key, so drop it from the value.
        r_id = r["r_id"]
        del r["r_id"]
        result[r_id] = r
    if discarded_rooms:
        Logger.warning(
            "Rooms discarded from floor", floor_id,
            "for having an invalid room id:", ", ".join(discarded_rooms)
        )
    return result
def test_remove_untouched_keys(self):
    # Buildings whose namespace data was not refreshed in this batch must
    # get the namespace unset and a deleted_<namespace> marker set.
    batch_date = datetime.now()
    Building.remove_untouched_keys("edilizia", batch_date)
    query = {
        "edilizia": {"$exists": True},
        "edilizia.updated_at": {"$lt": batch_date}
    }
    action = {
        "$unset": {"edilizia": ""},
        "$set": {"deleted_edilizia": batch_date}
    }
    options = {"multi": True}
    self.pm.update.assert_called_once_with("building", query, action, options)
    # Clear the recorded calls before checking the second namespace.
    self.pm.reset_mock()
    Building.remove_untouched_keys("easyroom", batch_date)
    query = {
        "easyroom": {"$exists": True},
        "easyroom.updated_at": {"$lt": batch_date}
    }
    action = {
        "$unset": {"easyroom": ""},
        "$set": {"deleted_easyroom": batch_date}
    }
    options = {"multi": True}
    self.pm.update.assert_called_once_with("building", query, action, options)
def find_building_to_update(self, building_dict):
    """
    Finds on database or create a Buiding object to be updated with
    information contained by building_dict

    Arguments:
    - building_dict: a dictionary containing the new values to be
      inserted on the building.

    Returns a Building object.

    Lookup order: primary b_id first, then the legacy id stored under
    merged.l_b_id; when both miss, a brand-new Building is returned.
    """
    b_id = building_dict.get("b_id", "")
    # Each fallback fires only when the previous lookup produced nothing
    # (same truthiness semantics as a chained `or`).
    candidate = Building.find(b_id)
    if not candidate:
        candidate = Building.find_by_field("merged.l_b_id", b_id)
    if not candidate:
        candidate = Building({"_id": b_id})
    return candidate
def _clean_unmarked_buildings(self):
    """
    Treat the completed batch as a full snapshot of this data source.

    Buildings the source did not touch in this batch (updated_at older
    than batch_date) receive a logic-delete marker
    (deleted_<namespace>); buildings whose every source carries such a
    marker are then physically removed from the database.

    Return value: None
    """
    # Logic-delete everything our namespace no longer mentions.
    n_removed, removed_docs = Building.remove_untouched_keys(
        self.get_namespace(), self.batch_date
    )
    removed_ids = [doc["b_id"] for doc in removed_docs]
    if removed_ids:
        Logger.info(n_removed,
                    "previously existing buildings are not present",
                    "in this snapshot:",
                    ", ".join(removed_ids))

    # Physically drop buildings that no source affirms anymore.
    n_destroyed, destroyed_docs = Building.remove_deleted_buildings()
    destroyed_ids = [doc["b_id"] for doc in destroyed_docs]
    if n_destroyed:
        Logger.info(n_destroyed,
                    "buildings were effectively removed from database",
                    "since no data source affirms its existence:",
                    ", ".join(destroyed_ids))
def find_building_to_update(self, building_dict):
    """
    Finds on database or create a Buiding object to be updated with
    information contained by building_dict

    Arguments:
    - building_dict: a dictionary containing the new values to be
      inserted on the building.

    Returns a Building object.

    Finds a building on database using b_id as identifier, otherwise
    looks for a building using l_b_id. If none of the above succeeds,
    creates a new building object to be inserted on database.
    """
    b_id = building_dict.get("b_id", "")
    # Lookup order: primary id, then the legacy id recorded during a
    # previous merge, and finally a brand-new (unsaved) Building.
    return (
        Building.find(b_id) or
        Building.find_by_field("merged.l_b_id", b_id) or
        Building({"_id": b_id})
    )
def _clean_unmarked_buildings(self): """ After an update batch is completed, buildings not updated are to be considered as "removed" by the supplied data source, and, hence, a logic "delete" operation is performed, by adding a delete_<namespace> key to the building object. A building is completely removed from database if every source that once stated it existed now has a deleted_<namespace> key set. What this algorithm does is to look for Buildings with updated_at field older than this last batch_date (i.e., untouched), and add the logic delete key to those buildings. Finally, it looks for buildings that had all data logically deleted and removes them physically from DB. Return value: None """ # Make sure the current update is performed as a perfect snapshot, # removing also "untouched" buildings n_removed, b_removed = Building.remove_untouched_keys( self.get_namespace(), self.batch_date) b_removed = [b["b_id"] for b in b_removed] if b_removed: Logger.info(n_removed, "previously existing buildings are not present", "in this snapshot:", ", ".join(b_removed)) n_destroyed, b_destroyed = Building.remove_deleted_buildings() b_destroyed = [b["b_id"] for b in b_destroyed] if n_destroyed: Logger.info( n_destroyed, "buildings were effectively removed from database", "since no data source affirms its existence:", ", ".join(b_destroyed))
def load_buildings(): """Create fake buildings and load into database.""" # Delete all rows in table to avoid adding duplicates Building.query.delete() for i in range(0, 50): # Question, if I only want 5 buildings, how do I do that when I'm creating addresses? Random number between 1 and 5? building = Building(name=fake.company()) # Add the building to the database db.session.add(building) # Commit all additions to database db.session.commit()
def full_scan():
    """Scan BlockLot_with_LatLon.csv and upsert one Building per distinct address."""
    set_log(os.path.join("logs", "txt", "buildings.txt"))
    log("Scanning BlockLot_with_LatLon.csv", important=True)
    # NOTE(review): the local name `csv` shadows a possible csv module
    # import at file level -- harmless here, but rename if csv.* is needed.
    csv = getcsv(os.path.join("scrapers", "data", "BlockLot_with_LatLon.csv"))
    winner = None  # address with the highest row count so far
    for row in csv:
        # Street number / name / suffix columns; skip rows with no address.
        addr = ("%s %s %s"%(row[21], row[19], row[18])).strip()
        if not addr:
            continue
        if addr not in buildings:  # `buildings` is a module-level counter dict
            buildings[addr] = 0
            building = Building.query(Building.address == addr).get()
            if not building:
                log("Can't find '%s' -- creating new entry"%(addr,), 2)
                building = Building(address=addr)
                # TODO: zipcode, year, building_id, owner
            btype = row[7].strip()
            lat = row[11].strip()
            lng = row[10].strip()
            # Only overwrite fields for which this row supplies a value.
            if btype:
                building.building_type = btype
            if lat:
                building.latitude = float(lat)
            if lng:
                building.longitude = float(lng)
            blds.append(building)  # module-level list of entities to save
        buildings[addr] += 1
        if not winner or buildings[addr] > buildings[winner]:
            winner = addr
    log("winner: %s (%s). scanned lines: %s"%(winner, buildings[winner], len(csv)), 1)
    log("writing bcounts", 1)
    writejson(buildings, os.path.join("logs", "json", "bcounts"))
    log("saving %s buildings to db"%(len(blds),), 1)
    db.put_multi(blds)
    log("goodbye")
    close_log()
def create_buildings(TIME_QUANT):
    """Build three wooden-house models with randomly jittered parameters.

    Nominal heater powers are 6, 12 and 18 kW; every physical parameter is
    scaled by an independent random factor in [0.8, 1.2).
    """
    jitter = lambda: 0.8 + random.random() * 0.4
    houses = []
    for max_power in [6000, 12000, 18000]:
        # Keyword order matters only in that it fixes the sequence of
        # jitter() calls, preserved from the original construction.
        houses.append(Building(
            size=(8 * jitter() * (max_power / 20000),
                  10 * jitter() * (max_power / 20000),
                  2.5 * jitter()),
            beta=15 * jitter(),
            R_st=0.5 * jitter(),
            sigma=0.03 * jitter(),
            max_power=max_power,
            description='wooden house',
            time_quant=TIME_QUANT))
    print('sum of nominal heaters powers (losses at -30C), kW: ',
          sum([b.get_loss_P() * (20 - (-30)) for b in houses]) / 1000)
    return houses
def find_building_to_update(self, building_dict):
    """
    Finds on database or create a Buiding object to be updated with
    information contained by building_dict

    Arguments:
    - building_dict: a dictionary containing the new values to be
      inserted on the building.

    Returns a Building object.

    Default policy: resolve by b_id, creating the building when it does
    not exist yet. Subclasses may override this behavior.
    """
    identifier = building_dict.get("b_id", "")
    return Building.find_or_create_by_id(identifier)
def load_buildings(): """Load buildings from u.buildings into database.""" # Delete all rows in table to avoid adding duplicates Building.query.delete() for row in open("data/u.buildings"): row = row.rstrip() building_id, name = row.split("|") building = Building(building_id=building_id, name=name) # Add the building to the database db.session.add(building) # Commit all additions to database db.session.commit()
def new_building(self, e):
    # Add a fresh Building to the GL scene and trigger a redraw.
    self.glwindow.buildings.append(Building())
    self.glwindow.update()
    # Create a control widget on the stacked panel; its index in the stack
    # mirrors the building's index in glwindow.buildings.
    index = self.stackedBuilding.count()
    new = BuildingControl(index, self.glwindow, self)
    self.stackedBuilding.addWidget(new)
    self.building_selector.addItem('build_' + str(self.building_count))
    self.building_count += 1
    # Make the new building the current selection in both UI widgets.
    self.building_selector.setCurrentIndex(index)
    self.stackedBuilding.setCurrentIndex(index)
    # Wire the control's signals back to this panel's handlers.
    self.stackedBuilding.widget(index).building_delete.connect(
        self.on_building_delete)
    self.stackedBuilding.widget(index).building_modified.connect(
        self.on_modified)
    # Focus editing on the new building; deselect any active source.
    self.glwindow.active_building = index
    self.glwindow.active_source = -1
def perform_svg_update(self, b_ids=None):
    """
    Call the perform_maps_update on every building or on a list of
    buildings specified with a list of b_ids.

    Arguments:
    - b_ids: a list of string representing b_ids.

    Returns: None.
    """
    # A map can be generated only for buildings that carry both the merged
    # floor data and the raw dxf floor data.
    query = {"$and": [{"merged.floors": {"$exists": True}},
                      {"dxf.floors": {"$exists": True}}]}
    if b_ids:
        # Narrow the update to the requested subset of buildings.
        query["_id"] = {"$in": b_ids}
    buildings = Building.where(query)
    for building in buildings:
        self.perform_maps_update(building)
def load_Building(): """Load building names into database.""" print "Buildings" # Delete all rows in table, so if we need to run this a second time, # we won't be trying to add duplicate users Building.query.delete() # Read building file and insert data for row in open("seed_data/building"): row = row.rstrip() bldg_id, bldg_name = row.split(",") building = Building(bldg_id=bldg_id, bldg_name=bldg_name) # We need to add to the session or it won't ever be stored db.session.add(building) # Once we're done, we should commit our work db.session.commit()
def sanitize_and_validate_floor(self, floor_id, floor_rooms):
    """
    Intended to clean up and validating floor_ids before insertion on
    database. It must also Log in case the floor is invalid.

    Arguments:
    - floor_id: the original floor_id string to be sanitized.
    - floor_rooms: a list of dictionaries representing the floor rooms.

    Returns a string representing the sanitized version of the floor_id.

    It is a good practice for subclasses to call this parent superclass.
    """
    sanitized = Building.is_valid_fid(floor_id)
    if not sanitized:
        # The whole floor is dropped, so report every room id lost with it.
        lost = [room["r_id"] for room in floor_rooms]
        Logger.warning("Empty floor id in building.", len(lost),
                       "rooms discarded:", ", ".join(lost))
    return sanitized
def export_parcels():
    """Insert a datastore Parcel for every unseen entry of the module-level `parcels` dict."""
    new_parcels = []
    items = parcels.items()
    log("processing %s parcels"%(len(items),), 1)
    for count, (p_id, pdata) in enumerate(items, start=1):
        if not Parcel.query(Parcel.parcel_id == p_id).get():
            log("Can't find parcel '%s' -- creating new entry"%(p_id,), 2)
            # Link the parcel to its building when one is on record.
            building = Building.query(Building.building_id == pdata["building"]).get()
            new_parcels.append(Parcel(
                parcel_id=p_id,
                dwelling_units=pdata["dwelling_units"],
                from_addr=pdata["from_addr"],
                to_addr=pdata["to_addr"],
                building=building and building.key or None
            ))
        if not count % 100:
            log("processed %s parcels"%(count,), 2)
    log("saving parcels", 1)
    db.put_multi(new_parcels)
def api_get_building_by_id(b_id): """ <h3>/buildings/<em>b_id</em></h3> <p>Returns the building with the specified b_id .</p> <h5>Parameters</h6> <p><em>b_id[string]</em> : a valid b_id</p> """ if not Building.is_valid_bid(b_id): abort(400) building = app.buildings.find_one({'_id':b_id}) if not building: building = [] for i in range(0,len(building['floors'])): building['floors'][i]['map'] = maps_url( b_id,building['floors'][i]['f_id'] ) building = prepare_building_for_api(building) return jsonify(building)
def load_tall_building(tall_filename):
    """Load buildings from tall buildings csv into database.

    Each row holds (name, address, at_risk, liquefaction). Addresses are
    geocoded through Google; rows that cannot be geocoded are skipped.
    """
    print("Tall Buildings")
    # SECURITY NOTE: this API key is hard-coded in source; it should be
    # moved to configuration or an environment variable.
    # Hoisted out of the loop: one geolocator serves every row.
    geolocator = GoogleV3(
        api_key='AIzaSyDeNiHduiEBvQI2CnzC1dis32FDktKV4eA')
    with open(tall_filename) as csvfile:
        # Fix: the previous code built a csv.reader and then re-opened and
        # re-read the same file with a manual str.split(","), which both
        # reads the file twice and breaks on quoted fields. Use the csv
        # reader rows directly.
        for row in csv.reader(csvfile):
            name, address, at_risk, liquefaction = row
            at_risk = at_risk == "yes"
            location = geolocator.geocode(address, timeout=180)
            if location is None:  # geocoding failed: skip this building
                continue
            tall_building = TallBuilding(name=name, at_risk=at_risk,
                                         liquefaction=liquefaction)
            tall_building.building = Building(
                address=address,
                latitude=float(location.latitude),
                longitude=float(location.longitude))
            # We need to add to the session or it won't ever be stored
            db.session.add(tall_building)
            print(tall_building)
    # Once we're done, we should commit our work
    db.session.commit()
def sanitize_and_validate_floor(self, floor_id, floor_rooms):
    """
    Intended to clean up and validating floor_ids before insertion on
    database. It must also Log in case the floor is invalid.

    Arguments:
    - floor_id: the original floor_id string to be sanitized.
    - floor_rooms: a list of dictionaries representing the floor rooms.

    Returns a string representing the sanitized version of the floor_id.

    It is a good practice for subclasses to call this parent superclass.
    """
    # is_valid_fid returns a falsy value for unusable floor ids and the
    # sanitized id otherwise.
    valid = Building.is_valid_fid(floor_id)
    if not valid:
        # The floor (and all its rooms) will be dropped: log what is lost.
        rooms = [r["r_id"] for r in floor_rooms]
        Logger.warning("Empty floor id in building.", len(rooms), "rooms discarded:", ", ".join(rooms))
    return valid
def post(self):
    """Create a Building from the JSON request body and return its id."""
    new_building = Building(**request.json)
    session.add(new_building)
    session.commit()
    return new_building.id
def tearDown(self):
    # Restore the persistence manager saved by setUp so later tests do not
    # keep talking to this test's mock.
    Building.set_pm(self.old_pm)
def setUp(self):
    """Swap Building's persistence manager for a MagicMock; tearDown restores it."""
    self.old_pm = Building._pm
    mock_pm = MagicMock()
    self.pm = mock_pm
    Building.set_pm(mock_pm)
def update_rooms(self, rooms): """ Perform an update of room data on Database. Arguments: - rooms: a list of dictionaries representing a room data. Does not return (None). Example of a room retrieved from an Edilizia csv file: { 'room_name' : 'Aula Seminari', 'cat_name' : 'Aula', 'r_id' : 'T065', 'b_id' : '11010', 'capacity' : '52', 'l_floor' : 'T' } The b_id field will be used to locate the associated building on the database. If it is found, it will be updated with the information, otherwise a new building will be created. Note that for each building that gets updated, it's floors will be emptied before adding the rooms information. Hence no partial updates are possible: everytime this method is called, it must receive the final list of rooms for each updated floor. """ # salviamo una data di aggiornamento comune a tutti i palazzi self.batch_date = datetime.now() # ordiniamo le stanze per edificio e per piano in modo da velocizzare l'algoritmo rooms.sort(key=lambda s: (s["b_id"], s["l_floor"])) # raggruppiamo le stanze per building_id rooms = groupby(rooms, key=lambda s: s["b_id"]) # Analizziamo un building alla volta for (b_id, rooms) in rooms: # Non procedo se il b_id non è valido if not Building.is_valid_bid(b_id): Logger.error("Invalid building id: \"{}\".".format(b_id), "Rooms discarded:", ", ".join(r["r_id"] for r in rooms)) continue building = Building.find_or_create_by_id(b_id) # Lavoro principale di aggiornamento self.replace_building_rooms(building, rooms) # Non sarebbe questa gia' una politica di merge? Si tratta di usare # info di piu' sorgenti per risolvere qualcosa di DXF, ma usiamo più # sorgenti! È un tipo di merge, non un DXFDataUpdater. Mi sembra nel # posto sbagliato questo metodo. Mi sembra che le funzionalità di # merge sono compito del building model. 
DXFRoomIdsResolver.resolve_rooms_id(building, None, self.get_namespace()) # Ensure floor merging is performed AFTER DXF Room_id resolution merged = building.attributes_for_source("merged") merged["floors"] = DataMerger.merge_floors( building.get("edilizia"), building.get("easyroom"), building.get("dxf")) building.save()
def setUp(self):
    # Fixture used as the input parameter in the calls under test: a raw
    # building document whose dxf floors still hold unidentified rooms,
    # plus the edilizia and easyroom catalogues the resolver matches
    # against (room ids like "R003"/"R002" appear in exactly one source).
    self.db_building = {
        "dxf": {
            "floors": [
                {
                    "f_id": "-0.5",
                    "rooms": {},
                    "unidentified_rooms": [
                        {  # Room 1, matches easyroom
                            "texts": [{"text": "StanzaMatchata"}, {"text": "R003"}, {"text": "AUL01"}]
                        },
                        {  # Room 2, matches nothing (malformed id "R00p3")
                            "texts": [{"text": "StanzaNonMatchata"}, {"text": "R00p3"}, {"text": "WC01"}]
                        },
                        {  # Room 3, matches edilizia
                            "texts": [{"text": "AUL03"}, {"text": "StanzaMatchata"}, {"text": "R002"}]
                        }
                    ]
                },
                {
                    "f_id": "0.5",
                    "rooms": {},
                    "unidentified_rooms": [
                        {  # Room 1, matches easyroom
                            "texts": [{"text": "StanzaMatchata"}, {"text": "R023"}, {"text": "AUL01"}]
                        },
                        {  # Room 2, matches nothing
                            "texts": [{"text": "StanzaNonMatchata"}, {"text": "R00p3"}, {"text": "WC01"}]
                        },
                        {  # Room 3, matches edilizia
                            "texts": [{"text": "AUL03"}, {"text": "StanzaMatchata"}, {"text": "R022"}]
                        }
                    ]
                },
            ]
        },
        "edilizia": {
            "floors": [
                {"f_id": "0", "rooms": {"PippoPelato": {}, "R002": {}, "PippoPelato2": {}}},
                {"f_id": "0.5", "rooms": {"PippoPelato": {}, "PippoPelato2": {}, "R022": {}}}
            ]
        },
        "easyroom": {
            "floors": [
                {"f_id": "0", "rooms": {"PippoSdentato": {}, "PippoSdentato2": {}, "R003": {}}},
                {"f_id": "1", "rooms": {"PippoSdentato": {}, "PippoSdentato2": {}, "R023": {}}}
            ]
        }
    }

    # Comparison fixture for asserting a successful run: what we expect at
    # the end of the update against both data sources (edilizia and
    # easyroom) -- every matchable dxf room keyed by its resolved room id.
    self.final_rooms = {
        # Room 1, matches easyroom
        "R003": {"texts": [{"text": "StanzaMatchata"}, {"text": "R003"}, {"text": "AUL01"}]},
        # Room 3, matches edilizia
        "R002": {"texts": [{"text": "AUL03"}, {"text": "StanzaMatchata"}, {"text": "R002"}]},
        # Room 1, matches easyroom
        "R023": {"texts": [{"text": "StanzaMatchata"}, {"text": "R023"}, {"text": "AUL01"}]},
        # Room 3, matches edilizia
        "R022": {"texts": [{"text": "AUL03"}, {"text": "StanzaMatchata"}, {"text": "R022"}]}
    }

    self.building = Building(self.db_building)
def update_rooms(self,rooms): """ Perform an update of room data on Database. Arguments: - rooms: a list of dictionaries representing a room data. Does not return (None). Example of a room retrieved from an Edilizia csv file: { 'room_name' : 'Aula Seminari', 'cat_name' : 'Aula', 'r_id' : 'T065', 'b_id' : '11010', 'capacity' : '52', 'l_floor' : 'T' } The b_id field will be used to locate the associated building on the database. If it is found, it will be updated with the information, otherwise a new building will be created. Note that for each building that gets updated, it's floors will be emptied before adding the rooms information. Hence no partial updates are possible: everytime this method is called, it must receive the final list of rooms for each updated floor. """ # salviamo una data di aggiornamento comune a tutti i palazzi self.batch_date = datetime.now() # ordiniamo le stanze per edificio e per piano in modo da velocizzare l'algoritmo rooms.sort(key = lambda s: (s["b_id"], s["l_floor"])) # raggruppiamo le stanze per building_id rooms = groupby(rooms, key = lambda s: s["b_id"]) # Analizziamo un building alla volta for (b_id, rooms) in rooms: # Non procedo se il b_id non è valido if not Building.is_valid_bid(b_id): Logger.error( "Invalid building id: \"{}\".".format(b_id), "Rooms discarded:", ", ".join(r["r_id"] for r in rooms) ) continue building = Building.find_or_create_by_id(b_id) # Lavoro principale di aggiornamento self.replace_building_rooms(building, rooms) # Non sarebbe questa gia' una politica di merge? Si tratta di usare # info di piu' sorgenti per risolvere qualcosa di DXF, ma usiamo più # sorgenti! È un tipo di merge, non un DXFDataUpdater. Mi sembra nel # posto sbagliato questo metodo. Mi sembra che le funzionalità di # merge sono compito del building model. 
DXFRoomIdsResolver.resolve_rooms_id(building, None, self.get_namespace()) # Ensure floor merging is performed AFTER DXF Room_id resolution merged = building.attributes_for_source("merged") merged["floors"] = DataMerger.merge_floors( building.get("edilizia"), building.get("easyroom"), building.get("dxf") ) building.save()
self.buildings[active].move('forward') self.building_z_changed.emit(self.buildings[active].position[2]) elif key == Qt.Key_Down: self.buildings[active].move('back') self.building_z_changed.emit(self.buildings[active].position[2]) elif key == Qt.Key_Left: self.buildings[active].move('left') self.building_x_changed.emit(self.buildings[active].position[0]) elif key == Qt.Key_Right: self.buildings[active].move('right') self.building_x_changed.emit(self.buildings[active].position[0]) if __name__ == '__main__': glutInit() displayMode = GLUT_DOUBLE | GLUT_ALPHA | GLUT_DEPTH glutInitDisplayMode(displayMode) app = QApplication(sys.argv) building = Building(floors=5, rooms=5, floor_thickness=1, room_length=12, room_width=6, room_height=5, wall_thickness=0.2) glwindow = GLWindow() source = Source() glwindow.sources.append(source) # source2 = Source(pos_x=10) # glwindow.sources.append(source2) glwindow.setGeometry(200, 200, 960, 680) glwindow.show() sys.exit(app.exec_())
def scanPage(page):
    """
    Parse one page of a fire-incident report and accumulate its records.

    The page text is consumed progressively: each helper strips the section
    it parsed and returns the remainder. Extracted columns (zips, addresses,
    dates, battalions, alarms, units, people) are positionally correlated —
    index n across all lists describes the same incident.

    NOTE(review): relies on module globals — `pnum` (current page number),
    `obj` (per-year/per-zip aggregate dict), `STEST` (test short-circuit
    flag) and `fires` (output list) — confirm their definitions elsewhere
    in the file.
    """
    page = fixPage(page)
    dates, page = getDates(page)
    years = [d.year for d in dates]
    # throw away times
    times, page = page.split("\n\n", 1)
    zips, addrs, page = getAddrs(page)
    batts, alarms, page = getBattsAlarms(page)
    units, people, page = getDisplaced(page, len(zips))
    if pnum == 13: # wow...
        # Page 13 of the source document has its columns shifted by one:
        # what parsed as "units" is really alarms, "people" is really units,
        # and the true people counts must be pulled from the remaining text.
        alarms = units
        units = people
        people = [int(l) for l in page.split("\n\n")[0].split("\n")[1:]]
    # TODO: correlate more stuff
    # - injuries/deaths
    # Sanity log: all parallel lists should have matching lengths.
    log("zips %s addrs %s dates %s batts %s alarms %s units %s people %s"%(len(zips), len(addrs), len(dates), len(batts), len(alarms), len(units), len(people)))
    if STEST:
        # Test mode: return the parsed columns without touching the datastore.
        return { "zips": zips, "addrs": addrs, "dates": dates, "batts": batts, "alarms": alarms, "units": units, "people": people }
    for n in range(len(zips)):
        # Aggregate per year and per zip code, plus a per-year "total" bucket.
        if years[n] not in obj:
            obj[years[n]] = {"total": { "fires": 0, "units": 0, "people": 0 }}
        if zips[n] not in obj[years[n]]:
            obj[years[n]][zips[n]] = { "fires": 0, "units": 0, "people": 0 }
        obj[years[n]]["total"]["fires"] += 1
        obj[years[n]]["total"]["units"] += units[n]
        obj[years[n]]["total"]["people"] += people[n]
        obj[years[n]][zips[n]]["fires"] += 1
        obj[years[n]][zips[n]]["units"] += units[n]
        obj[years[n]][zips[n]]["people"] += people[n]
        # TODO: injuries, fatalities, losses
        # Look up the building by address; create (and geocode) it if absent.
        building = Building.query(Building.address == addrs[n]).get()
        if not building:
            log("no building (%s) -- creating new one!"%(addrs[n],), important=True)
            # NOTE(review): falls back to geocoding the address when the
            # parsed zip is falsy — confirm addr2zip's failure behaviour.
            building = Building(address=addrs[n], zipcode=getzip(zips[n] or addr2zip(addrs[n])).key)
            building.latitude, building.longitude = address2latlng(addrs[n])
            building.put()
        # Queue a Fire record linked to the building (persisted by caller).
        fires.append(Fire(
            building=building.key,
            date=dates[n],
            battalion=batts[n],
            alarms=alarms[n],
            units=units[n],
            persons=people[n]
        ))
def setUp(self):
    """
    Prepare a Building fixture backed by a database-style document.

    Loads the floor-inference configuration into self.floor_dict and builds
    self.building from self.db_building, a dict shaped like a persisted
    building record with two namespaces:
    - "merged":   consolidated view (rooms with polygons, walls, windows and
                  unidentified_rooms per floor, plus address/coordinates);
    - "edilizia": raw source data (rooms keyed by id with capacity,
                  room_name, l_floor and cat_id).
    """
    # Floor-name inference rules used by the tests.
    config_file = "config/floor_inference.json"
    with open(config_file) as cf:
        self.floor_dict = json.load(cf)

    self.db_building = {
        "_id": "33110",
        "merged": {
            "floors": [
                # Basement floor: one identified room plus three rooms that
                # could not be matched to a room id.
                {
                    "rooms": {
                        "S015": {
                            "capacity": "20",
                            "equipments": [],
                            "polygon": {},
                            "room_name": "Aula Gamma",
                            "accessibility": "",
                            "cat_id": "AUL03"
                        }
                    },
                    "f_id": "-05",
                    "walls": [],
                    "windows": [],
                    "unidentified_rooms": [
                        { "cat_id": "WC01", "polygon": {} },
                        { "cat_id": "WC01", "polygon": {} },
                        { "cat_id": "UFF01", "polygon": {} }
                    ]
                },
                # Upper floor "03": three identified rooms.
                {
                    "rooms": {
                        "R057": {
                            "capacity": "128",
                            "equipments": [],
                            "polygon": {},
                            "room_name": "Aula Alfa",
                            "accessibility": "",
                            "cat_id": "AUL01"
                        },
                        "R107": {
                            "capacity": "12",
                            "equipments": [],
                            "polygon": {},
                            "room_name": "Aula 6",
                            "accessibility": "",
                            "cat_id": "AUL01"
                        },
                        "R013": {
                            "capacity": "26",
                            "equipments": [],
                            "polygon": {},
                            "room_name": "Aula Delta",
                            "accessibility": "",
                            "cat_id": "AUL03"
                        },
                    },
                    "f_id": "03",
                    "walls": [],
                    "windows": [],
                    "unidentified_rooms": [
                        { "cat_id": "WC01", "polygon": {} },
                        { "cat_id": "STD01", "polygon": {} },
                        { "cat_id": "UFF01", "polygon": {} }
                    ]
                },
            ],
            "building_name": "",
            "coordinates": {
                "coordinates": [9.214915, 45.454309],
                "type": "Point"
            },
            "l_b_id": "5830",
            "address": "Via Comelico, 39, Milano",
            "building_number": "1"
        },
        # Raw "edilizia" source: deliberately richer than "merged" on floor
        # "03" (extra rooms R008, R100, R048, R099) so merge logic can be
        # exercised.
        "edilizia": {
            "lat": "45.454309",
            "lon": "9.214915",
            "updated_at": "",
            "floors": [
                {
                    "rooms": {
                        "S015": {
                            "capacity": "20",
                            "room_name": "Aula Gamma",
                            "l_floor": "S",
                            "cat_id": "AUL03"
                        }
                    },
                    "f_id": "-05"
                },
                {
                    "rooms": {
                        "R057": { "capacity": "128", "room_name": "Aula Alfa", "l_floor": "R", "cat_id": "AUL01" },
                        "R008": { "capacity": "208", "room_name": "Aula Sigma e Omega", "l_floor": "R", "cat_id": "AUL03" },
                        "R013": { "capacity": "26", "room_name": "Aula Delta", "l_floor": "R", "cat_id": "AUL03" },
                        "R100": { "capacity": "12", "room_name": "Aula 4", "l_floor": "R", "cat_id": "AUL01" },
                        "R048": { "capacity": "192", "room_name": "Aula Beta", "l_floor": "R", "cat_id": "AUL01" },
                        "R099": { "capacity": "16", "room_name": "Aula 5", "l_floor": "R", "cat_id": "AUL01" },
                        "R107": { "capacity": "12", "room_name": "Aula 6", "l_floor": "R", "cat_id": "AUL01" }
                    },
                    "f_id": "03"
                }
            ],
            "b_id": "33110",
            "l_b_id": "5830",
            "address": "Milano - Via Comelico 39_Ed 1"
        }
    }

    # System under test.
    self.building = Building(self.db_building)
# NOTE(review): this fragment is the tail of a reporting function whose `def`
# line (and the definitions of `count`, `which`, `prefix`, `source`, `target`)
# are outside this view — confirm against the full file.
        total_floors = count[prefix+".total_floors"]
        identified_rooms = len(which[prefix+".identified_rooms"])
        non_identified_rooms = len(which[prefix+".non_identified_rooms"])
        total_rooms = identified_rooms + non_identified_rooms
        # Reformat the counts as "value (percent)" strings for display.
        identified_rooms = data_and_percent(identified_rooms, total_rooms)
        non_identified_rooms = data_and_percent(non_identified_rooms, total_rooms)

        Logger.info("Total floors analysed : {}".format(total_floors))
        Logger.info("Total rooms found on {:<8} : {}".format(source, total_rooms))
        Logger.info("Found in both sources : {}".format(identified_rooms))
        # Logger.info is used as a context manager here — presumably it
        # indents nested log lines; confirm Logger's implementation.
        with Logger.info("Not found on {:<9} : {}".format(target, non_identified_rooms)):
            if which[prefix+".non_identified_rooms"]:
                unident = sorted(which[prefix+".non_identified_rooms"])
                # Print the unidentified room ids nine per line, padded to
                # fixed-width columns.
                while unident:
                    message = ""
                    for room_id in unident[:9]:
                        message += "{:<12}".format(room_id)
                    Logger.info(message)
                    unident = unident[9:]


# Top-level driver: report every building, then emit the aggregate summary.
# NOTE(review): placement of final_report() outside the loop is inferred from
# its name ("final") — the flattened source does not preserve indentation, so
# confirm against the original file.
for b in Building.where({}):
    GeneralReport.report_building(b)
GeneralReport.final_report()
def test_building_creation(self):
    """A Building built from a raw dict exposes its id (stringified) and attributes via attr()."""
    # "_id" is supplied as an int; both id accessors must return it as a string.
    fixture = {"_id": 123, "height": "everest"}
    created = Building(fixture)
    for attr_name, expected in (("b_id", "123"), ("_id", "123"), ("height", "everest")):
        self.assertEqual(created.attr(attr_name), expected)
def test_building_creation(self):
    """Building wraps a plain dict: a numeric _id comes back as a string from attr()."""
    # NOTE(review): this appears to duplicate an identical
    # test_building_creation defined earlier in the file — confirm whether
    # one of the two should be removed or renamed.
    instance = Building({ "_id" : 123, "height" : "everest" })
    checks = [("b_id", "123"), ("_id", "123"), ("height", "everest")]
    for name, want in checks:
        self.assertEqual(instance.attr(name), want)