def test_removal_updates_lac(self):
    """Removing cells one at a time shrinks the covering LAC;
    removing the final cell deletes the LAC row entirely."""
    session = self.session
    keys = dict(radio=Radio.cdma, mcc=1, mnc=1, lac=1)

    # setup: build LAC as above
    self.add_line_of_cells_and_scan_lac()

    # confirm we got one
    lac = session.query(CellArea).filter(CellArea.lac == 1).first()
    self.assertEqual(lac.lat, 4.5)
    self.assertEqual(lac.lon, 4.5)
    self.assertEqual(lac.range, 723001)

    # Remove cells one by one checking that the LAC
    # changes shape along the way.
    steps = [
        ((5.0, 5.0), 644242),
        ((5.5, 5.5), 565475),
        ((6.0, 6.0), 486721),
        ((6.5, 6.5), 408000),
        ((7.0, 7.0), 329334),
        ((7.5, 7.5), 250743),
        ((8.0, 8.0), 172249),
        ((8.5, 8.5), 93871),
        ((9.0, 9.0), 15630),
    ]
    for cid, ((want_lat, want_lon), want_range) in enumerate(steps):
        session.expire(lac)
        key = Cell.to_hashkey(cid=cid, **keys)
        self.assertEqual(1, remove_cell.delay([key]).get())
        self.assertEqual(1, scan_areas.delay().get())
        lac = session.query(CellArea).filter(CellArea.lac == 1).first()
        self.assertEqual(lac.lat, want_lat)
        self.assertEqual(lac.lon, want_lon)
        self.assertEqual(lac.range, want_range)

    # Remove final cell, check LAC is gone
    key = Cell.to_hashkey(cid=9, **keys)
    self.assertEqual(1, remove_cell.delay([key]).get())
    self.assertEqual(1, scan_areas.delay().get())
    lac = session.query(CellArea).filter(CellArea.lac == 1).first()
    self.assertEqual(lac, None)
def test_removal_updates_lac(self):
    # Verify that deleting cells one at a time shrinks the covering
    # LAC (lat/lon centroid and range), and that deleting the last
    # cell removes the CellArea row entirely.
    # NOTE(review): this appears byte-identical to a sibling test of
    # the same name elsewhere in this file — likely an accidental
    # duplicate definition; confirm and deduplicate.
    session = self.session
    keys = dict(radio=Radio.cdma, mcc=1, mnc=1, lac=1)

    # setup: build LAC as above
    self.add_line_of_cells_and_scan_lac()

    # confirm we got one
    lac = session.query(CellArea).filter(CellArea.lac == 1).first()
    self.assertEqual(lac.lat, 4.5)
    self.assertEqual(lac.lon, 4.5)
    self.assertEqual(lac.range, 723001)

    # Remove cells one by one checking that the LAC
    # changes shape along the way.
    steps = [
        # ((expected lat, expected lon), expected range in meters)
        ((5.0, 5.0), 644242),
        ((5.5, 5.5), 565475),
        ((6.0, 6.0), 486721),
        ((6.5, 6.5), 408000),
        ((7.0, 7.0), 329334),
        ((7.5, 7.5), 250743),
        ((8.0, 8.0), 172249),
        ((8.5, 8.5), 93871),
        ((9.0, 9.0), 15630),
    ]
    for i in range(9):
        # expire so the re-query below reloads fresh DB state
        session.expire(lac)
        k = Cell.to_hashkey(cid=i, **keys)
        result = remove_cell.delay([k])
        self.assertEqual(1, result.get())
        result = scan_areas.delay()
        self.assertEqual(1, result.get())
        lac = session.query(CellArea).filter(CellArea.lac == 1).first()
        self.assertEqual(lac.lat, steps[i][0][0])
        self.assertEqual(lac.lon, steps[i][0][1])
        self.assertEqual(lac.range, steps[i][1])

    # Remove final cell, check LAC is gone
    k = Cell.to_hashkey(cid=9, **keys)
    result = remove_cell.delay([k])
    self.assertEqual(1, result.get())
    result = scan_areas.delay()
    self.assertEqual(1, result.get())
    lac = session.query(CellArea).filter(CellArea.lac == 1).first()
    self.assertEqual(lac, None)
def process_reports(self, reports, userid=None):
    """Split incoming reports into cell/wifi observations, enqueue
    them, and update map/score/upload statistics.

    A report yielding neither cell nor wifi observations counts as
    malformed. `userid` is forwarded to the score bookkeeping.
    """
    bad_reports = 0
    report_positions = set()
    obs_by_type = {'cell': [], 'wifi': []}
    counters = {
        'cell': {'upload': 0, 'drop': 0},
        'wifi': {'upload': 0, 'drop': 0},
    }
    new_counts = {'cell': 0, 'wifi': 0}

    for report in reports:
        cell_obs, wifi_obs, malformed = self.process_report(report)
        if cell_obs:
            obs_by_type['cell'].extend(cell_obs)
            counters['cell']['upload'] += len(cell_obs)
        if wifi_obs:
            obs_by_type['wifi'].extend(wifi_obs)
            counters['wifi']['upload'] += len(wifi_obs)
        if cell_obs or wifi_obs:
            report_positions.add((report['lat'], report['lon']))
        else:
            bad_reports += 1
        for name in ('cell', 'wifi'):
            counters[name]['drop'] += malformed[name]

    # group by unique station key; determine scores for stations
    cell_keys = {Cell.to_hashkey(obs) for obs in obs_by_type['cell']}
    wifi_keys = {obs.mac for obs in obs_by_type['wifi']}
    new_counts['cell'] += self.new_stations('cell', cell_keys)
    new_counts['wifi'] += self.new_stations('wifi', wifi_keys)

    for name, queue in (('cell', self.cell_queue),
                        ('wifi', self.wifi_queue)):
        if obs_by_type[name]:
            queue.enqueue(list(obs_by_type[name]), pipe=self.pipe)

    self.process_mapstat(report_positions)
    self.process_score(userid, report_positions, new_counts)
    self.emit_stats(len(reports), bad_reports, counters)
def remove_cell(self, cell_keys):
    """Delete the given cells from the database.

    Any LAC touched by a deleted cell is queued for a later
    re-scan after the transaction commits. Returns the number
    of cell rows actually removed.
    """
    removed = 0
    redis_client = self.app.redis_client
    with self.db_session() as session:
        dirty_areas = set()
        for cell_key in cell_keys:
            hashkey = Cell.to_hashkey(cell_key)
            removed += session.query(Cell).filter(
                *Cell.joinkey(hashkey)).delete()
            dirty_areas.add(CellArea.to_hashkey(hashkey))
        if dirty_areas:
            # enqueue only once the delete has committed
            session.on_post_commit(
                enqueue_lacs, redis_client, dirty_areas,
                UPDATE_KEY['cell_lac'])
        session.commit()
    return removed
def __call__(self, batch=10):
    """Dequeue up to `batch` cell observations, fold them into new or
    existing Cell rows, and track blocklisted / moving stations.

    Returns a (stations inserted-or-updated, stations newly detected
    as moving) tuple.
    """
    all_observations = self.data_queue.dequeue(batch=batch)
    drop_counter = defaultdict(int)
    added = 0
    new_stations = 0

    # Group observations by their station hash key.
    station_obs = defaultdict(list)
    for obs in all_observations:
        station_obs[Cell.to_hashkey(obs)].append(obs)

    if not station_obs:
        return (0, 0)

    # Load the existing station rows for all keys in this batch.
    stations = {}
    for station in Cell.iterkeys(self.session, list(station_obs.keys())):
        stations[station.hashkey()] = station

    # Map of station key -> (blocked, first_blocked, block) — see the
    # default tuple below for the shape; semantics of first_blocked /
    # block come from blocklisted_stations (not visible here).
    blocklist = self.blocklisted_stations(station_obs.keys())

    new_station_values = []
    changed_station_values = []
    moving_stations = set()
    for station_key, observations in station_obs.items():
        blocked, first_blocked, block = blocklist.get(
            station_key, (False, None, None))

        # Skip keys whose observations are all falsy.
        if not any(observations):
            continue

        if blocked:
            # Drop observations for blocklisted stations.
            drop_counter['blocklisted'] += len(observations)
            continue

        station = stations.get(station_key, None)
        if station is None and not first_blocked:
            # We discovered an actual new never before seen station.
            new_stations += 1

        moving, new_values, changed_values = self.new_station_values(
            station, station_key, first_blocked, observations)
        if moving:
            moving_stations.add((station_key, block))
        else:
            added += len(observations)
            if new_values:
                new_station_values.append(new_values)
            if changed_values:
                changed_station_values.append(changed_values)

        # track potential updates to dependent areas
        self.add_area_update(station_key)

    if new_station_values:
        # do a batch insert of new stations
        stmt = Cell.__table__.insert(
            mysql_on_duplicate='total_measures = total_measures'  # no-op
        )
        # but limit the batch depending on each model
        ins_batch = Cell._insert_batch
        for i in range(0, len(new_station_values), ins_batch):
            batch_values = new_station_values[i:i + ins_batch]
            self.session.execute(stmt.values(batch_values))

    if changed_station_values:
        # do a batch update of changed stations
        ins_batch = Cell._insert_batch
        for i in range(0, len(changed_station_values), ins_batch):
            batch_values = changed_station_values[i:i + ins_batch]
            self.session.bulk_update_mappings(Cell, batch_values)

    if self.updated_areas:
        self.queue_area_updates()

    if moving_stations:
        self.blocklist_stations(moving_stations)

    self.emit_stats(added, drop_counter)
    self.emit_statcounters(added, new_stations)

    # Re-schedule ourselves if enough data is already waiting.
    if self.data_queue.enough_data(batch=batch):  # pragma: no cover
        self.update_task.apply_async(
            kwargs={'batch': batch},
            countdown=2,
            expires=10)

    return (len(stations) + len(new_station_values), len(moving_stations))